In [2]:
# Shane Khalid 10/17/2023

Stock Prediction Deep Learning Model¶

Stock/Asset prices are time-series data and so I am implementing a LSTM (Long Short-Term Memory) which is a type of RNN (Recurrent Neural Network) that can remember information over a long period of time.¶
Import libraries. I normally use yfinance for this, but I have a clean dataset loading in¶
In [1]:
import numpy as np
import pandas as pd
import tensorflow as tf

import plotly.express as px
import statsmodels.api as sm
import matplotlib.pyplot as plt
import plotly.graph_objects as go

from tensorflow import keras

from keras.layers import GRU, Dropout, SimpleRNN, LSTM, Dense, SimpleRNN, GRU
from keras.models import Sequential
from sklearn.preprocessing import MinMaxScaler
print("Num GPUs Available: ", len(tf.config.experimental.list_physical_devices('GPU')))
2023-10-18 16:12:11.476119: E tensorflow/compiler/xla/stream_executor/cuda/cuda_dnn.cc:9342] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered
2023-10-18 16:12:11.476178: E tensorflow/compiler/xla/stream_executor/cuda/cuda_fft.cc:609] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered
2023-10-18 16:12:11.477641: E tensorflow/compiler/xla/stream_executor/cuda/cuda_blas.cc:1518] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered
2023-10-18 16:12:11.610707: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.
To enable the following instructions: AVX2 FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.
Num GPUs Available:  1
2023-10-18 16:12:15.660173: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:880] could not open file to read NUMA node: /sys/bus/pci/devices/0000:43:00.0/numa_node
Your kernel may have been built without NUMA support.
2023-10-18 16:12:15.687115: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:880] could not open file to read NUMA node: /sys/bus/pci/devices/0000:43:00.0/numa_node
Your kernel may have been built without NUMA support.
2023-10-18 16:12:15.687162: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:880] could not open file to read NUMA node: /sys/bus/pci/devices/0000:43:00.0/numa_node
Your kernel may have been built without NUMA support.
Load Data (Google stock)¶
In [2]:
google_training_complete = pd.read_csv("./Google_Stock_Train (2010-2022).csv")

# Convert 'Date' column to datetime format
google_training_complete['Date'] = pd.to_datetime(google_training_complete['Date'])

google_training_complete.head(10)
Out[2]:
Date Open High Low Close Adj Close Volume
0 2010-01-04 15.689439 15.753504 15.621622 15.684434 15.684434 78169752
1 2010-01-05 15.695195 15.711712 15.554054 15.615365 15.615365 120067812
2 2010-01-06 15.662162 15.662162 15.174174 15.221722 15.221722 158988852
3 2010-01-07 15.250250 15.265265 14.831081 14.867367 14.867367 256315428
4 2010-01-08 14.814815 15.096346 14.742492 15.065566 15.065566 188783028
5 2010-01-11 15.126627 15.126627 14.865866 15.042793 15.042793 288227484
6 2010-01-12 14.956206 14.968969 14.714715 14.776777 14.776777 193937868
7 2010-01-13 14.426677 14.724224 14.361862 14.691942 14.691942 259604136
8 2010-01-14 14.612112 14.869870 14.584835 14.761011 14.761011 169434396
9 2010-01-15 14.848348 14.853854 14.465465 14.514515 14.514515 217162620

Exploratory Data Analysis (EDA)¶

In [3]:
# Line chart for Google stock price over time
fig1 = px.line(google_training_complete, x='Date', y='Close', title='Google Stock Price Over Time')
fig1.show()
/home/shane/.local/lib/python3.10/site-packages/_plotly_utils/basevalidators.py:105: FutureWarning: The behavior of DatetimeProperties.to_pydatetime is deprecated, in a future version this will return a Series containing python datetime objects instead of an ndarray. To retain the old behavior, call `np.array` on the result
  v = v.dt.to_pydatetime()
In [4]:
# Scatter plot of daily trading volume
fig2 = px.scatter(google_training_complete, x='Date', y='Volume', title='Daily Trading Volume')
fig2.show()
/home/shane/.local/lib/python3.10/site-packages/_plotly_utils/basevalidators.py:105: FutureWarning:

The behavior of DatetimeProperties.to_pydatetime is deprecated, in a future version this will return a Series containing python datetime objects instead of an ndarray. To retain the old behavior, call `np.array` on the result

In [5]:
# Box plot of Google stock prices for each year
google_training_complete['Year'] = google_training_complete['Date'].dt.year
fig3 = px.box(google_training_complete, x='Year', y='Close', title='Google Stock Prices - Yearly Box Plot')
fig3.show()
In [6]:
# Candlestick chart for stock prices
fig4 = go.Figure(data=[go.Candlestick(x=google_training_complete['Date'],
                                     open=google_training_complete['Open'],
                                     high=google_training_complete['High'],
                                     low=google_training_complete['Low'],
                                     close=google_training_complete['Close'])])

fig4.update_layout(title='Google Stock Prices - Candlestick Chart',
                   xaxis_title='Date',
                   yaxis_title='Stock Price')

fig4.show()
/home/shane/.local/lib/python3.10/site-packages/_plotly_utils/basevalidators.py:105: FutureWarning:

The behavior of DatetimeProperties.to_pydatetime is deprecated, in a future version this will return a Series containing python datetime objects instead of an ndarray. To retain the old behavior, call `np.array` on the result

In [7]:
# Histogram of daily returns
google_training_complete['Daily_Return'] = google_training_complete['Close'].pct_change()
fig5 = px.histogram(google_training_complete, x='Daily_Return', nbins=30, title='Distribution of Daily Returns')
fig5.show()
In [9]:
# Heatmap of correlation matrix
correlation_matrix = google_training_complete.corr()
fig6 = px.imshow(correlation_matrix, x=correlation_matrix.index, y=correlation_matrix.columns, title='Correlation Matrix Heatmap')
fig6.show()
In [10]:
# Moving Average of closing prices
google_training_complete['MA_50'] = google_training_complete['Close'].rolling(window=50).mean()
fig7 = px.line(google_training_complete, x='Date', y=['Close', 'MA_50'], title='Google Stock Close Price with 50-Day Moving Average')
fig7.show()
/home/shane/.local/lib/python3.10/site-packages/_plotly_utils/basevalidators.py:105: FutureWarning:

The behavior of DatetimeProperties.to_pydatetime is deprecated, in a future version this will return a Series containing python datetime objects instead of an ndarray. To retain the old behavior, call `np.array` on the result

In [11]:
# Scatter plot of closing prices vs. trading volume
fig8 = px.scatter(google_training_complete, x='Close', y='Volume', title='Closing Prices vs. Trading Volume')
fig8.show()
In [12]:
# Line chart for daily stock price change
google_training_complete['Daily_Change'] = google_training_complete['Close'].diff()
fig9 = px.line(google_training_complete, x='Date', y='Daily_Change', title='Daily Stock Price Change')
fig9.show()
/home/shane/.local/lib/python3.10/site-packages/_plotly_utils/basevalidators.py:105: FutureWarning:

The behavior of DatetimeProperties.to_pydatetime is deprecated, in a future version this will return a Series containing python datetime objects instead of an ndarray. To retain the old behavior, call `np.array` on the result

In [13]:
# Bar chart of trading volume per month
google_training_complete['Month'] = google_training_complete['Date'].dt.month
monthly_volume = google_training_complete.groupby('Month')['Volume'].sum().reset_index()
fig10 = px.bar(monthly_volume, x='Month', y='Volume', title='Total Trading Volume per Month')
fig10.show()
In [14]:
# Area chart for daily trading volume
fig11 = px.area(google_training_complete, x='Date', y='Volume', title='Daily Trading Volume (Area Chart)')
fig11.show()
/home/shane/.local/lib/python3.10/site-packages/_plotly_utils/basevalidators.py:105: FutureWarning:

The behavior of DatetimeProperties.to_pydatetime is deprecated, in a future version this will return a Series containing python datetime objects instead of an ndarray. To retain the old behavior, call `np.array` on the result

In [15]:
# Histogram of daily stock price fluctuations
fig12 = px.histogram(google_training_complete, x='Daily_Change', title='Distribution of Daily Stock Price Changes')
fig12.show()
In [16]:
# Line chart for percent change of daily stock price
google_training_complete['Daily_Pct_Change'] = google_training_complete['Close'].pct_change() * 100
fig13 = px.line(google_training_complete, x='Date', y='Daily_Pct_Change', title='Daily Stock Price Percent Change')
fig13.show()
/home/shane/.local/lib/python3.10/site-packages/_plotly_utils/basevalidators.py:105: FutureWarning:

The behavior of DatetimeProperties.to_pydatetime is deprecated, in a future version this will return a Series containing python datetime objects instead of an ndarray. To retain the old behavior, call `np.array` on the result

In [17]:
# Box plot of daily stock price percent change by month
fig14 = px.box(google_training_complete, x='Month', y='Daily_Pct_Change', title='Daily Stock Price Percent Change by Month')
fig14.show()
In [18]:
# Scatter plot of closing prices with Trend Line (should have used differnt color)
fig15 = px.scatter(google_training_complete, x='Date', y='Close', title='Google Stock Prices with Trendline', trendline='lowess')
fig15.show()
/home/shane/.local/lib/python3.10/site-packages/_plotly_utils/basevalidators.py:105: FutureWarning:

The behavior of DatetimeProperties.to_pydatetime is deprecated, in a future version this will return a Series containing python datetime objects instead of an ndarray. To retain the old behavior, call `np.array` on the result

/home/shane/.local/lib/python3.10/site-packages/_plotly_utils/basevalidators.py:105: FutureWarning:

The behavior of DatetimeProperties.to_pydatetime is deprecated, in a future version this will return a Series containing python datetime objects instead of an ndarray. To retain the old behavior, call `np.array` on the result

In [19]:
# Line chart for daily trading volume with moving average
google_training_complete['MA_Volume'] = google_training_complete['Volume'].rolling(window=10).mean()
fig16 = px.line(google_training_complete, x='Date', y=['Volume', 'MA_Volume'], title='Daily Trading Volume with 10-Day Moving Average')
fig16.show()
/home/shane/.local/lib/python3.10/site-packages/_plotly_utils/basevalidators.py:105: FutureWarning:

The behavior of DatetimeProperties.to_pydatetime is deprecated, in a future version this will return a Series containing python datetime objects instead of an ndarray. To retain the old behavior, call `np.array` on the result

In [20]:
# Box plot of daily stock price changes by year 
google_training_complete['Year'] = google_training_complete['Date'].dt.year
fig17 = px.box(google_training_complete, x='Year', y='Daily_Change', title='Daily Stock Price Changes by Year')
fig17.show()
In [21]:
# Line chart for daily closing prices in 2022
# 2022 because Train dataset is 2010-2022
df_2022 = google_training_complete[google_training_complete['Year'] == 2022]
fig18 = px.line(google_training_complete, x='Date', y='Close', title='Google Stock Prices in 2022')
fig18.show()
/home/shane/.local/lib/python3.10/site-packages/_plotly_utils/basevalidators.py:105: FutureWarning:

The behavior of DatetimeProperties.to_pydatetime is deprecated, in a future version this will return a Series containing python datetime objects instead of an ndarray. To retain the old behavior, call `np.array` on the result

In [22]:
# Scatter plot of daily returns vs. trading volume
fig19 = px.scatter(google_training_complete, x='Daily_Return', y='Volume', title='Daily Returns vs. Trading Volume')
fig19.show()
In [23]:
# Line chart for daily trading volume with range slider
fig20 = px.line(google_training_complete, x='Date', y='Volume', title='Daily Trading Volume with Range Slider')
fig20.update_xaxes(rangeslider_visible=True)
fig20.show()
/home/shane/.local/lib/python3.10/site-packages/_plotly_utils/basevalidators.py:105: FutureWarning:

The behavior of DatetimeProperties.to_pydatetime is deprecated, in a future version this will return a Series containing python datetime objects instead of an ndarray. To retain the old behavior, call `np.array` on the result

Data preprocessing¶

In [24]:
google_training_processed = google_training_complete.iloc[:, 4:5].values
google_training_processed
Out[24]:
array([[15.684434],
       [15.615365],
       [15.221722],
       ...,
       [86.019997],
       [88.449997],
       [88.230003]])
In [25]:
scaler = MinMaxScaler(feature_range = (0, 1))

google_training_scaled = scaler.fit_transform(google_training_processed)
In [26]:
print(google_training_scaled)
print(google_training_scaled.shape)
[[0.03434761]
 [0.03385045]
 [0.03101697]
 ...
 [0.54062898]
 [0.55812033]
 [0.55653679]]
(3272, 1)
In [27]:
features_set = []
labels = []
for i in range(60, google_training_scaled.shape[0]):
    features_set.append(google_training_scaled[i-60:i, 0])
    labels.append(google_training_scaled[i, 0])
In [28]:
type(features_set)
Out[28]:
list
In [29]:
features_set, labels = np.array(features_set), np.array(labels)
In [30]:
features_set = np.reshape(features_set, (features_set.shape[0], features_set.shape[1], 1))
print(features_set.shape)
(3212, 60, 1)

LSTM layers work on 3D data with the following structure (nb_sequence, nb_timestep, nb_feature).¶

- nb_sequence : total number of sequences in dataset
- nb_timestep : size of sequences
- nb_feature : number of features describing timesteps 

RNN¶

Build RNN Model¶

Pretty generic/standard RNN¶
In [31]:
RNN_model = Sequential()
RNN_model.add(SimpleRNN(units=300, return_sequences=True, input_shape=(features_set.shape[1], 1)))
RNN_model.add(Dropout(0.2))

RNN_model.add(SimpleRNN(units=100, return_sequences=True))
RNN_model.add(Dropout(0.2))

RNN_model.add(SimpleRNN(units=100, return_sequences=True))
RNN_model.add(Dropout(0.2))

RNN_model.add(SimpleRNN(units=100))
RNN_model.add(Dropout(0.2))

RNN_model.add(Dense(units = 1))
2023-10-18 16:14:36.082137: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:880] could not open file to read NUMA node: /sys/bus/pci/devices/0000:43:00.0/numa_node
Your kernel may have been built without NUMA support.
2023-10-18 16:14:36.082241: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:880] could not open file to read NUMA node: /sys/bus/pci/devices/0000:43:00.0/numa_node
Your kernel may have been built without NUMA support.
2023-10-18 16:14:36.082271: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:880] could not open file to read NUMA node: /sys/bus/pci/devices/0000:43:00.0/numa_node
Your kernel may have been built without NUMA support.
2023-10-18 16:14:36.468842: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:880] could not open file to read NUMA node: /sys/bus/pci/devices/0000:43:00.0/numa_node
Your kernel may have been built without NUMA support.
2023-10-18 16:14:36.468912: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:880] could not open file to read NUMA node: /sys/bus/pci/devices/0000:43:00.0/numa_node
Your kernel may have been built without NUMA support.
2023-10-18 16:14:36.468924: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1977] Could not identify NUMA node of platform GPU id 0, defaulting to 0.  Your kernel may not have been built with NUMA support.
2023-10-18 16:14:36.468972: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:880] could not open file to read NUMA node: /sys/bus/pci/devices/0000:43:00.0/numa_node
Your kernel may have been built without NUMA support.
2023-10-18 16:14:36.469000: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1886] Created device /job:localhost/replica:0/task:0/device:GPU:0 with 21765 MB memory:  -> device: 0, name: NVIDIA GeForce RTX 3090 Ti, pci bus id: 0000:43:00.0, compute capability: 8.6
2023-10-18 16:14:40.842066: I tensorflow/tsl/platform/default/subprocess.cc:304] Start cannot spawn child process: No such file or directory
Compile RNN Model using Adam optimizer¶
In [32]:
RNN_model.compile(optimizer = 'adam', loss = 'mean_squared_error')
In [33]:
RNN_model.summary()
Model: "sequential"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 simple_rnn (SimpleRNN)      (None, 60, 300)           90600     
                                                                 
 dropout (Dropout)           (None, 60, 300)           0         
                                                                 
 simple_rnn_1 (SimpleRNN)    (None, 60, 100)           40100     
                                                                 
 dropout_1 (Dropout)         (None, 60, 100)           0         
                                                                 
 simple_rnn_2 (SimpleRNN)    (None, 60, 100)           20100     
                                                                 
 dropout_2 (Dropout)         (None, 60, 100)           0         
                                                                 
 simple_rnn_3 (SimpleRNN)    (None, 100)               20100     
                                                                 
 dropout_3 (Dropout)         (None, 100)               0         
                                                                 
 dense (Dense)               (None, 1)                 101       
                                                                 
=================================================================
Total params: 171001 (667.97 KB)
Trainable params: 171001 (667.97 KB)
Non-trainable params: 0 (0.00 Byte)
_________________________________________________________________

Fit RNN Model¶

In [40]:
# Finally got the GPU backend to work
# batch sizes of 32 led to a capped 24% GPU usage but raising it to 3200 made the GPU usage >50% and it proccessed the epochs lightning fast
RNN_History = RNN_model.fit(features_set, labels, epochs = 300, batch_size = 3200)
Epoch 1/300
2/2 [==============================] - 1s 353ms/step - loss: 0.0494
Epoch 2/300
2/2 [==============================] - 1s 348ms/step - loss: 0.0424
Epoch 3/300
2/2 [==============================] - 1s 353ms/step - loss: 0.0404
Epoch 4/300
2/2 [==============================] - 1s 382ms/step - loss: 0.0321
Epoch 5/300
2/2 [==============================] - 1s 398ms/step - loss: 0.0377
Epoch 6/300
2/2 [==============================] - 1s 389ms/step - loss: 0.0249
Epoch 7/300
2/2 [==============================] - 1s 397ms/step - loss: 0.0131
Epoch 8/300
2/2 [==============================] - 1s 356ms/step - loss: 0.0131
Epoch 9/300
2/2 [==============================] - 1s 361ms/step - loss: 0.0112
Epoch 10/300
2/2 [==============================] - 1s 351ms/step - loss: 0.0152
Epoch 11/300
2/2 [==============================] - 1s 349ms/step - loss: 0.0427
Epoch 12/300
2/2 [==============================] - 1s 337ms/step - loss: 0.0515
Epoch 13/300
2/2 [==============================] - 1s 343ms/step - loss: 0.0295
Epoch 14/300
2/2 [==============================] - 1s 363ms/step - loss: 0.0655
Epoch 15/300
2/2 [==============================] - 1s 379ms/step - loss: 0.0370
Epoch 16/300
2/2 [==============================] - 1s 368ms/step - loss: 0.0411
Epoch 17/300
2/2 [==============================] - 1s 343ms/step - loss: 0.0180
Epoch 18/300
2/2 [==============================] - 1s 339ms/step - loss: 0.0119
Epoch 19/300
2/2 [==============================] - 1s 330ms/step - loss: 0.0128
Epoch 20/300
2/2 [==============================] - 1s 347ms/step - loss: 0.0323
Epoch 21/300
2/2 [==============================] - 1s 349ms/step - loss: 0.0174
Epoch 22/300
2/2 [==============================] - 1s 356ms/step - loss: 0.0158
Epoch 23/300
2/2 [==============================] - 1s 341ms/step - loss: 0.0116
Epoch 24/300
2/2 [==============================] - 1s 341ms/step - loss: 0.0160
Epoch 25/300
2/2 [==============================] - 1s 345ms/step - loss: 0.0121
Epoch 26/300
2/2 [==============================] - 1s 357ms/step - loss: 0.0118
Epoch 27/300
2/2 [==============================] - 1s 354ms/step - loss: 0.0088
Epoch 28/300
2/2 [==============================] - 1s 358ms/step - loss: 0.0080
Epoch 29/300
2/2 [==============================] - 1s 353ms/step - loss: 0.0084
Epoch 30/300
2/2 [==============================] - 1s 358ms/step - loss: 0.0132
Epoch 31/300
2/2 [==============================] - 1s 342ms/step - loss: 0.0290
Epoch 32/300
2/2 [==============================] - 1s 376ms/step - loss: 0.0087
Epoch 33/300
2/2 [==============================] - 1s 367ms/step - loss: 0.0082
Epoch 34/300
2/2 [==============================] - 1s 361ms/step - loss: 0.0092
Epoch 35/300
2/2 [==============================] - 1s 409ms/step - loss: 0.0095
Epoch 36/300
2/2 [==============================] - 1s 342ms/step - loss: 0.0095
Epoch 37/300
2/2 [==============================] - 1s 351ms/step - loss: 0.0086
Epoch 38/300
2/2 [==============================] - 1s 360ms/step - loss: 0.0091
Epoch 39/300
2/2 [==============================] - 1s 356ms/step - loss: 0.0120
Epoch 40/300
2/2 [==============================] - 1s 471ms/step - loss: 0.0129
Epoch 41/300
2/2 [==============================] - 1s 348ms/step - loss: 0.0102
Epoch 42/300
2/2 [==============================] - 1s 346ms/step - loss: 0.0161
Epoch 43/300
2/2 [==============================] - 1s 377ms/step - loss: 0.0084
Epoch 44/300
2/2 [==============================] - 1s 388ms/step - loss: 0.0102
Epoch 45/300
2/2 [==============================] - 1s 349ms/step - loss: 0.0176
Epoch 46/300
2/2 [==============================] - 1s 345ms/step - loss: 0.0232
Epoch 47/300
2/2 [==============================] - 1s 349ms/step - loss: 0.0106
Epoch 48/300
2/2 [==============================] - 1s 354ms/step - loss: 0.0133
Epoch 49/300
2/2 [==============================] - 1s 365ms/step - loss: 0.0066
Epoch 50/300
2/2 [==============================] - 1s 360ms/step - loss: 0.0069
Epoch 51/300
2/2 [==============================] - 1s 352ms/step - loss: 0.0092
Epoch 52/300
2/2 [==============================] - 1s 336ms/step - loss: 0.0135
Epoch 53/300
2/2 [==============================] - 1s 341ms/step - loss: 0.0085
Epoch 54/300
2/2 [==============================] - 1s 335ms/step - loss: 0.0142
Epoch 55/300
2/2 [==============================] - 1s 343ms/step - loss: 0.0080
Epoch 56/300
2/2 [==============================] - 1s 343ms/step - loss: 0.0090
Epoch 57/300
2/2 [==============================] - 1s 350ms/step - loss: 0.0108
Epoch 58/300
2/2 [==============================] - 1s 333ms/step - loss: 0.0069
Epoch 59/300
2/2 [==============================] - 1s 353ms/step - loss: 0.0133
Epoch 60/300
2/2 [==============================] - 1s 337ms/step - loss: 0.0061
Epoch 61/300
2/2 [==============================] - 1s 348ms/step - loss: 0.0063
Epoch 62/300
2/2 [==============================] - 1s 365ms/step - loss: 0.0089
Epoch 63/300
2/2 [==============================] - 1s 347ms/step - loss: 0.0054
Epoch 64/300
2/2 [==============================] - 1s 352ms/step - loss: 0.0072
Epoch 65/300
2/2 [==============================] - 1s 348ms/step - loss: 0.0066
Epoch 66/300
2/2 [==============================] - 1s 331ms/step - loss: 0.0064
Epoch 67/300
2/2 [==============================] - 1s 346ms/step - loss: 0.0086
Epoch 68/300
2/2 [==============================] - 1s 336ms/step - loss: 0.0052
Epoch 69/300
2/2 [==============================] - 1s 338ms/step - loss: 0.0072
Epoch 70/300
2/2 [==============================] - 1s 344ms/step - loss: 0.0086
Epoch 71/300
2/2 [==============================] - 1s 370ms/step - loss: 0.0054
Epoch 72/300
2/2 [==============================] - 1s 355ms/step - loss: 0.0061
Epoch 73/300
2/2 [==============================] - 1s 347ms/step - loss: 0.0062
Epoch 74/300
2/2 [==============================] - 1s 350ms/step - loss: 0.0052
Epoch 75/300
2/2 [==============================] - 1s 346ms/step - loss: 0.0054
Epoch 76/300
2/2 [==============================] - 1s 348ms/step - loss: 0.0054
Epoch 77/300
2/2 [==============================] - 1s 357ms/step - loss: 0.0051
Epoch 78/300
2/2 [==============================] - 1s 353ms/step - loss: 0.0070
Epoch 79/300
2/2 [==============================] - 1s 347ms/step - loss: 0.0046
Epoch 80/300
2/2 [==============================] - 1s 348ms/step - loss: 0.0050
Epoch 81/300
2/2 [==============================] - 1s 344ms/step - loss: 0.0052
Epoch 82/300
2/2 [==============================] - 1s 341ms/step - loss: 0.0050
Epoch 83/300
2/2 [==============================] - 1s 362ms/step - loss: 0.0052
Epoch 84/300
2/2 [==============================] - 1s 342ms/step - loss: 0.0054
Epoch 85/300
2/2 [==============================] - 1s 363ms/step - loss: 0.0049
Epoch 86/300
2/2 [==============================] - 1s 343ms/step - loss: 0.0053
Epoch 87/300
2/2 [==============================] - 1s 341ms/step - loss: 0.0045
Epoch 88/300
2/2 [==============================] - 1s 349ms/step - loss: 0.0054
Epoch 89/300
2/2 [==============================] - 1s 346ms/step - loss: 0.0054
Epoch 90/300
2/2 [==============================] - 1s 348ms/step - loss: 0.0045
Epoch 91/300
2/2 [==============================] - 1s 351ms/step - loss: 0.0060
Epoch 92/300
2/2 [==============================] - 1s 348ms/step - loss: 0.0064
Epoch 93/300
2/2 [==============================] - 1s 353ms/step - loss: 0.0044
Epoch 94/300
2/2 [==============================] - 1s 365ms/step - loss: 0.0061
Epoch 95/300
2/2 [==============================] - 1s 340ms/step - loss: 0.0049
Epoch 96/300
2/2 [==============================] - 1s 348ms/step - loss: 0.0043
Epoch 97/300
2/2 [==============================] - 1s 356ms/step - loss: 0.0042
Epoch 98/300
2/2 [==============================] - 1s 353ms/step - loss: 0.0046
Epoch 99/300
2/2 [==============================] - 1s 367ms/step - loss: 0.0041
Epoch 100/300
2/2 [==============================] - 1s 360ms/step - loss: 0.0045
Epoch 101/300
2/2 [==============================] - 1s 350ms/step - loss: 0.0050
Epoch 102/300
2/2 [==============================] - 1s 349ms/step - loss: 0.0046
Epoch 103/300
2/2 [==============================] - 1s 346ms/step - loss: 0.0043
Epoch 104/300
2/2 [==============================] - 1s 391ms/step - loss: 0.0049
Epoch 105/300
2/2 [==============================] - 1s 358ms/step - loss: 0.0050
Epoch 106/300
2/2 [==============================] - 1s 348ms/step - loss: 0.0041
Epoch 107/300
2/2 [==============================] - 1s 348ms/step - loss: 0.0058
Epoch 108/300
2/2 [==============================] - 1s 346ms/step - loss: 0.0062
Epoch 109/300
2/2 [==============================] - 1s 342ms/step - loss: 0.0040
Epoch 110/300
2/2 [==============================] - 1s 347ms/step - loss: 0.0049
Epoch 111/300
2/2 [==============================] - 1s 344ms/step - loss: 0.0070
Epoch 112/300
2/2 [==============================] - 1s 368ms/step - loss: 0.0046
Epoch 113/300
2/2 [==============================] - 1s 355ms/step - loss: 0.0052
Epoch 114/300
2/2 [==============================] - 1s 356ms/step - loss: 0.0046
Epoch 115/300
2/2 [==============================] - 1s 357ms/step - loss: 0.0043
Epoch 116/300
2/2 [==============================] - 1s 339ms/step - loss: 0.0051
Epoch 117/300
2/2 [==============================] - 1s 348ms/step - loss: 0.0051
Epoch 118/300
2/2 [==============================] - 1s 363ms/step - loss: 0.0045
Epoch 119/300
2/2 [==============================] - 1s 408ms/step - loss: 0.0056
Epoch 120/300
2/2 [==============================] - 1s 363ms/step - loss: 0.0065
Epoch 121/300
2/2 [==============================] - 1s 352ms/step - loss: 0.0062
Epoch 122/300
2/2 [==============================] - 1s 346ms/step - loss: 0.0071
Epoch 123/300
2/2 [==============================] - 1s 341ms/step - loss: 0.0042
Epoch 124/300
2/2 [==============================] - 1s 347ms/step - loss: 0.0085
Epoch 125/300
2/2 [==============================] - 1s 346ms/step - loss: 0.0043
Epoch 126/300
2/2 [==============================] - 1s 350ms/step - loss: 0.0069
Epoch 127/300
2/2 [==============================] - 1s 354ms/step - loss: 0.0052
Epoch 128/300
2/2 [==============================] - 1s 347ms/step - loss: 0.0047
Epoch 129/300
2/2 [==============================] - 1s 356ms/step - loss: 0.0068
Epoch 130/300
2/2 [==============================] - 1s 345ms/step - loss: 0.0036
Epoch 131/300
2/2 [==============================] - 1s 355ms/step - loss: 0.0049
Epoch 132/300
2/2 [==============================] - 1s 360ms/step - loss: 0.0056
Epoch 133/300
2/2 [==============================] - 1s 354ms/step - loss: 0.0045
Epoch 134/300
2/2 [==============================] - 1s 352ms/step - loss: 0.0045
Epoch 135/300
2/2 [==============================] - 1s 408ms/step - loss: 0.0056
Epoch 136/300
2/2 [==============================] - 1s 359ms/step - loss: 0.0040
Epoch 137/300
2/2 [==============================] - 1s 331ms/step - loss: 0.0064
Epoch 138/300
2/2 [==============================] - 1s 347ms/step - loss: 0.0046
Epoch 139/300
2/2 [==============================] - 1s 337ms/step - loss: 0.0051
Epoch 140/300
2/2 [==============================] - 1s 367ms/step - loss: 0.0046
Epoch 141/300
2/2 [==============================] - 1s 353ms/step - loss: 0.0046
Epoch 142/300
2/2 [==============================] - 1s 340ms/step - loss: 0.0046
Epoch 143/300
2/2 [==============================] - 1s 365ms/step - loss: 0.0078
Epoch 144/300
2/2 [==============================] - 1s 346ms/step - loss: 0.0048
Epoch 145/300
2/2 [==============================] - 1s 350ms/step - loss: 0.0038
Epoch 146/300
2/2 [==============================] - 1s 340ms/step - loss: 0.0037
Epoch 147/300
2/2 [==============================] - 1s 346ms/step - loss: 0.0044
Epoch 148/300
2/2 [==============================] - 1s 333ms/step - loss: 0.0046
Epoch 149/300
2/2 [==============================] - 1s 354ms/step - loss: 0.0046
Epoch 150/300
2/2 [==============================] - 1s 352ms/step - loss: 0.0039
Epoch 151/300
2/2 [==============================] - 1s 345ms/step - loss: 0.0040
Epoch 152/300
2/2 [==============================] - 1s 361ms/step - loss: 0.0049
Epoch 153/300
2/2 [==============================] - 1s 334ms/step - loss: 0.0047
Epoch 154/300
2/2 [==============================] - 1s 362ms/step - loss: 0.0038
Epoch 155/300
2/2 [==============================] - 1s 408ms/step - loss: 0.0067
Epoch 156/300
2/2 [==============================] - 1s 338ms/step - loss: 0.0038
Epoch 157/300
2/2 [==============================] - 1s 364ms/step - loss: 0.0044
Epoch 158/300
2/2 [==============================] - 1s 351ms/step - loss: 0.0039
Epoch 159/300
2/2 [==============================] - 1s 332ms/step - loss: 0.0050
Epoch 160/300
2/2 [==============================] - 1s 349ms/step - loss: 0.0042
Epoch 161/300
2/2 [==============================] - 1s 386ms/step - loss: 0.0041
Epoch 162/300
2/2 [==============================] - 1s 357ms/step - loss: 0.0046
Epoch 163/300
2/2 [==============================] - 1s 335ms/step - loss: 0.0038
Epoch 164/300
2/2 [==============================] - 1s 340ms/step - loss: 0.0050
Epoch 165/300
2/2 [==============================] - 1s 348ms/step - loss: 0.0039
Epoch 166/300
2/2 [==============================] - 1s 347ms/step - loss: 0.0042
Epoch 167/300
2/2 [==============================] - 1s 336ms/step - loss: 0.0058
Epoch 168/300
2/2 [==============================] - 1s 340ms/step - loss: 0.0038
Epoch 169/300
2/2 [==============================] - 1s 343ms/step - loss: 0.0050
Epoch 170/300
2/2 [==============================] - 1s 338ms/step - loss: 0.0046
Epoch 171/300
2/2 [==============================] - 1s 344ms/step - loss: 0.0056
Epoch 172/300
2/2 [==============================] - 1s 343ms/step - loss: 0.0040
Epoch 173/300
2/2 [==============================] - 1s 334ms/step - loss: 0.0034
Epoch 174/300
2/2 [==============================] - 1s 339ms/step - loss: 0.0040
Epoch 175/300
2/2 [==============================] - 1s 344ms/step - loss: 0.0038
Epoch 176/300
2/2 [==============================] - 1s 347ms/step - loss: 0.0035
Epoch 177/300
2/2 [==============================] - 1s 343ms/step - loss: 0.0033
Epoch 178/300
2/2 [==============================] - 1s 353ms/step - loss: 0.0037
Epoch 179/300
2/2 [==============================] - 1s 441ms/step - loss: 0.0037
Epoch 180/300
2/2 [==============================] - 1s 336ms/step - loss: 0.0032
Epoch 181/300
2/2 [==============================] - 1s 408ms/step - loss: 0.0038
Epoch 182/300
2/2 [==============================] - 1s 366ms/step - loss: 0.0035
Epoch 183/300
2/2 [==============================] - 1s 351ms/step - loss: 0.0033
Epoch 184/300
2/2 [==============================] - 1s 351ms/step - loss: 0.0035
Epoch 185/300
2/2 [==============================] - 1s 357ms/step - loss: 0.0033
Epoch 186/300
2/2 [==============================] - 1s 380ms/step - loss: 0.0039
Epoch 187/300
2/2 [==============================] - 1s 337ms/step - loss: 0.0039
Epoch 188/300
2/2 [==============================] - 1s 346ms/step - loss: 0.0039
Epoch 189/300
2/2 [==============================] - 1s 349ms/step - loss: 0.0037
Epoch 190/300
2/2 [==============================] - 1s 346ms/step - loss: 0.0050
Epoch 191/300
2/2 [==============================] - 1s 353ms/step - loss: 0.0037
Epoch 192/300
2/2 [==============================] - 1s 352ms/step - loss: 0.0035
Epoch 193/300
2/2 [==============================] - 1s 353ms/step - loss: 0.0040
Epoch 194/300
2/2 [==============================] - 1s 349ms/step - loss: 0.0036
Epoch 195/300
2/2 [==============================] - 1s 368ms/step - loss: 0.0036
Epoch 196/300
2/2 [==============================] - 1s 363ms/step - loss: 0.0037
Epoch 197/300
2/2 [==============================] - 1s 344ms/step - loss: 0.0046
Epoch 198/300
2/2 [==============================] - 1s 352ms/step - loss: 0.0034
Epoch 199/300
2/2 [==============================] - 1s 351ms/step - loss: 0.0033
Epoch 200/300
2/2 [==============================] - 1s 356ms/step - loss: 0.0037
Epoch 201/300
2/2 [==============================] - 1s 345ms/step - loss: 0.0032
Epoch 202/300
2/2 [==============================] - 1s 339ms/step - loss: 0.0032
Epoch 203/300
2/2 [==============================] - 1s 356ms/step - loss: 0.0040
Epoch 204/300
2/2 [==============================] - 1s 331ms/step - loss: 0.0035
Epoch 205/300
2/2 [==============================] - 1s 343ms/step - loss: 0.0032
Epoch 206/300
2/2 [==============================] - 1s 346ms/step - loss: 0.0031
Epoch 207/300
2/2 [==============================] - 1s 355ms/step - loss: 0.0033
Epoch 208/300
2/2 [==============================] - 1s 360ms/step - loss: 0.0032
Epoch 209/300
2/2 [==============================] - 1s 350ms/step - loss: 0.0035
Epoch 210/300
2/2 [==============================] - 1s 334ms/step - loss: 0.0033
Epoch 211/300
2/2 [==============================] - 1s 348ms/step - loss: 0.0032
Epoch 212/300
2/2 [==============================] - 1s 346ms/step - loss: 0.0034
Epoch 213/300
2/2 [==============================] - 1s 357ms/step - loss: 0.0040
Epoch 214/300
2/2 [==============================] - 1s 345ms/step - loss: 0.0034
Epoch 215/300
2/2 [==============================] - 1s 390ms/step - loss: 0.0036
Epoch 216/300
2/2 [==============================] - 1s 353ms/step - loss: 0.0041
Epoch 217/300
2/2 [==============================] - 1s 348ms/step - loss: 0.0035
Epoch 218/300
2/2 [==============================] - 1s 345ms/step - loss: 0.0038
Epoch 219/300
2/2 [==============================] - 1s 370ms/step - loss: 0.0040
Epoch 220/300
2/2 [==============================] - 1s 338ms/step - loss: 0.0036
Epoch 221/300
2/2 [==============================] - 1s 353ms/step - loss: 0.0038
Epoch 222/300
2/2 [==============================] - 1s 342ms/step - loss: 0.0041
Epoch 223/300
2/2 [==============================] - 1s 341ms/step - loss: 0.0033
Epoch 224/300
2/2 [==============================] - 1s 348ms/step - loss: 0.0037
Epoch 225/300
2/2 [==============================] - 1s 353ms/step - loss: 0.0038
Epoch 226/300
2/2 [==============================] - 1s 349ms/step - loss: 0.0034
Epoch 227/300
2/2 [==============================] - 1s 350ms/step - loss: 0.0042
Epoch 228/300
2/2 [==============================] - 1s 345ms/step - loss: 0.0041
Epoch 229/300
2/2 [==============================] - 1s 348ms/step - loss: 0.0032
Epoch 230/300
2/2 [==============================] - 1s 349ms/step - loss: 0.0045
Epoch 231/300
2/2 [==============================] - 1s 349ms/step - loss: 0.0048
Epoch 232/300
2/2 [==============================] - 1s 355ms/step - loss: 0.0033
Epoch 233/300
2/2 [==============================] - 1s 354ms/step - loss: 0.0052
Epoch 234/300
2/2 [==============================] - 1s 353ms/step - loss: 0.0034
Epoch 235/300
2/2 [==============================] - 1s 347ms/step - loss: 0.0056
Epoch 236/300
2/2 [==============================] - 1s 347ms/step - loss: 0.0033
Epoch 237/300
2/2 [==============================] - 1s 337ms/step - loss: 0.0041
Epoch 238/300
2/2 [==============================] - 1s 344ms/step - loss: 0.0033
Epoch 239/300
2/2 [==============================] - 1s 348ms/step - loss: 0.0040
Epoch 240/300
2/2 [==============================] - 1s 359ms/step - loss: 0.0031
Epoch 241/300
2/2 [==============================] - 1s 353ms/step - loss: 0.0040
Epoch 242/300
2/2 [==============================] - 1s 331ms/step - loss: 0.0033
Epoch 243/300
2/2 [==============================] - 1s 350ms/step - loss: 0.0032
Epoch 244/300
2/2 [==============================] - 1s 350ms/step - loss: 0.0032
Epoch 245/300
2/2 [==============================] - 1s 352ms/step - loss: 0.0032
Epoch 246/300
2/2 [==============================] - 1s 340ms/step - loss: 0.0030
Epoch 247/300
2/2 [==============================] - 1s 352ms/step - loss: 0.0032
Epoch 248/300
2/2 [==============================] - 1s 339ms/step - loss: 0.0032
Epoch 249/300
2/2 [==============================] - 1s 341ms/step - loss: 0.0033
Epoch 250/300
2/2 [==============================] - 1s 345ms/step - loss: 0.0032
Epoch 251/300
2/2 [==============================] - 1s 364ms/step - loss: 0.0033
Epoch 252/300
2/2 [==============================] - 1s 353ms/step - loss: 0.0029
Epoch 253/300
2/2 [==============================] - 1s 347ms/step - loss: 0.0032
Epoch 254/300
2/2 [==============================] - 1s 345ms/step - loss: 0.0035
Epoch 255/300
2/2 [==============================] - 1s 374ms/step - loss: 0.0033
Epoch 256/300
2/2 [==============================] - 1s 337ms/step - loss: 0.0041
Epoch 257/300
2/2 [==============================] - 1s 369ms/step - loss: 0.0031
Epoch 258/300
2/2 [==============================] - 1s 351ms/step - loss: 0.0033
Epoch 259/300
2/2 [==============================] - 1s 354ms/step - loss: 0.0046
Epoch 260/300
2/2 [==============================] - 1s 353ms/step - loss: 0.0043
Epoch 261/300
2/2 [==============================] - 1s 350ms/step - loss: 0.0039
Epoch 262/300
2/2 [==============================] - 1s 354ms/step - loss: 0.0031
Epoch 263/300
2/2 [==============================] - 1s 339ms/step - loss: 0.0031
Epoch 264/300
2/2 [==============================] - 1s 339ms/step - loss: 0.0076
Epoch 265/300
2/2 [==============================] - 1s 356ms/step - loss: 0.0032
Epoch 266/300
2/2 [==============================] - 1s 358ms/step - loss: 0.0077
Epoch 267/300
2/2 [==============================] - 1s 361ms/step - loss: 0.0044
Epoch 268/300
2/2 [==============================] - 1s 344ms/step - loss: 0.0041
Epoch 269/300
2/2 [==============================] - 1s 384ms/step - loss: 0.0063
Epoch 270/300
2/2 [==============================] - 1s 389ms/step - loss: 0.0033
Epoch 271/300
2/2 [==============================] - 1s 352ms/step - loss: 0.0051
Epoch 272/300
2/2 [==============================] - 1s 356ms/step - loss: 0.0033
Epoch 273/300
2/2 [==============================] - 1s 349ms/step - loss: 0.0034
Epoch 274/300
2/2 [==============================] - 1s 385ms/step - loss: 0.0030
Epoch 275/300
2/2 [==============================] - 1s 336ms/step - loss: 0.0033
Epoch 276/300
2/2 [==============================] - 1s 358ms/step - loss: 0.0031
Epoch 277/300
2/2 [==============================] - 1s 362ms/step - loss: 0.0039
Epoch 278/300
2/2 [==============================] - 1s 348ms/step - loss: 0.0031
Epoch 279/300
2/2 [==============================] - 1s 357ms/step - loss: 0.0035
Epoch 280/300
2/2 [==============================] - 1s 331ms/step - loss: 0.0077
Epoch 281/300
2/2 [==============================] - 1s 378ms/step - loss: 0.0029
Epoch 282/300
2/2 [==============================] - 1s 378ms/step - loss: 0.0042
Epoch 283/300
2/2 [==============================] - 1s 360ms/step - loss: 0.0050
Epoch 284/300
2/2 [==============================] - 1s 348ms/step - loss: 0.0033
Epoch 285/300
2/2 [==============================] - 1s 346ms/step - loss: 0.0031
Epoch 286/300
2/2 [==============================] - 1s 346ms/step - loss: 0.0040
Epoch 287/300
2/2 [==============================] - 1s 354ms/step - loss: 0.0035
Epoch 288/300
2/2 [==============================] - 1s 368ms/step - loss: 0.0045
Epoch 289/300
2/2 [==============================] - 1s 359ms/step - loss: 0.0032
Epoch 290/300
2/2 [==============================] - 1s 372ms/step - loss: 0.0045
Epoch 291/300
2/2 [==============================] - 1s 369ms/step - loss: 0.0033
Epoch 292/300
2/2 [==============================] - 1s 361ms/step - loss: 0.0030
Epoch 293/300
2/2 [==============================] - 1s 403ms/step - loss: 0.0041
Epoch 294/300
2/2 [==============================] - 1s 362ms/step - loss: 0.0034
Epoch 295/300
2/2 [==============================] - 1s 337ms/step - loss: 0.0032
Epoch 296/300
2/2 [==============================] - 1s 352ms/step - loss: 0.0031
Epoch 297/300
2/2 [==============================] - 1s 351ms/step - loss: 0.0034
Epoch 298/300
2/2 [==============================] - 1s 338ms/step - loss: 0.0039
Epoch 299/300
2/2 [==============================] - 1s 341ms/step - loss: 0.0030
Epoch 300/300
2/2 [==============================] - 1s 364ms/step - loss: 0.0032

The thing with RNNs are that they are limited by vanishing or exploding gradients. This is whey we can use LSTM and/or GRU¶

LSTM¶

This works by passing info to the input gate, the output coming from the output gate, and the forget gate that trashes info we don't need.¶

Build LSTM Model¶

In [41]:
LSTM_model = Sequential()
LSTM_model.add(LSTM(units=300, return_sequences=True, input_shape=(features_set.shape[1], 1)))
LSTM_model.add(Dropout(0.2))

LSTM_model.add(LSTM(units=100, return_sequences=True))
LSTM_model.add(Dropout(0.2))

LSTM_model.add(LSTM(units=100, return_sequences=True))
LSTM_model.add(Dropout(0.2))

LSTM_model.add(LSTM(units=100))
LSTM_model.add(Dropout(0.2))

LSTM_model.add(Dense(units = 1))

Compile LSTM Model using Adam Optimizer¶

In [42]:
LSTM_model.compile(optimizer = 'adam', loss = 'mean_squared_error')
In [43]:
LSTM_model.summary()
Model: "sequential_1"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 lstm (LSTM)                 (None, 60, 300)           362400    
                                                                 
 dropout_4 (Dropout)         (None, 60, 300)           0         
                                                                 
 lstm_1 (LSTM)               (None, 60, 100)           160400    
                                                                 
 dropout_5 (Dropout)         (None, 60, 100)           0         
                                                                 
 lstm_2 (LSTM)               (None, 60, 100)           80400     
                                                                 
 dropout_6 (Dropout)         (None, 60, 100)           0         
                                                                 
 lstm_3 (LSTM)               (None, 100)               80400     
                                                                 
 dropout_7 (Dropout)         (None, 100)               0         
                                                                 
 dense_1 (Dense)             (None, 1)                 101       
                                                                 
=================================================================
Total params: 683701 (2.61 MB)
Trainable params: 683701 (2.61 MB)
Non-trainable params: 0 (0.00 Byte)
_________________________________________________________________

Fit LSTM Model¶

In [44]:
LSTM_History = LSTM_model.fit(features_set, labels, epochs = 300, batch_size = 3200)
Epoch 1/300
2/2 [==============================] - 5s 44ms/step - loss: 0.1446
Epoch 2/300
2/2 [==============================] - 0s 36ms/step - loss: 0.0072
Epoch 3/300
2/2 [==============================] - 0s 32ms/step - loss: 0.0060
Epoch 4/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0517
Epoch 5/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0714
Epoch 6/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0684
Epoch 7/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0553
Epoch 8/300
2/2 [==============================] - 0s 32ms/step - loss: 0.0322
Epoch 9/300
2/2 [==============================] - 0s 29ms/step - loss: 0.0137
Epoch 10/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0319
Epoch 11/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0149
Epoch 12/300
2/2 [==============================] - 0s 39ms/step - loss: 0.0085
Epoch 13/300
2/2 [==============================] - 0s 32ms/step - loss: 0.0112
Epoch 14/300
2/2 [==============================] - 0s 40ms/step - loss: 0.0117
Epoch 15/300
2/2 [==============================] - 0s 32ms/step - loss: 0.0068
Epoch 16/300
2/2 [==============================] - 0s 32ms/step - loss: 0.0017
Epoch 17/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0050
Epoch 18/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0049
Epoch 19/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0024
Epoch 20/300
2/2 [==============================] - 0s 32ms/step - loss: 0.0037
Epoch 21/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0036
Epoch 22/300
2/2 [==============================] - 0s 29ms/step - loss: 0.0019
Epoch 23/300
2/2 [==============================] - 0s 34ms/step - loss: 0.0024
Epoch 24/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0021
Epoch 25/300
2/2 [==============================] - 0s 34ms/step - loss: 0.0019
Epoch 26/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0030
Epoch 27/300
2/2 [==============================] - 0s 33ms/step - loss: 0.0025
Epoch 28/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0016
Epoch 29/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0018
Epoch 30/300
2/2 [==============================] - 0s 34ms/step - loss: 0.0016
Epoch 31/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0020
Epoch 32/300
2/2 [==============================] - 0s 41ms/step - loss: 0.0021
Epoch 33/300
2/2 [==============================] - 0s 35ms/step - loss: 0.0015
Epoch 34/300
2/2 [==============================] - 0s 36ms/step - loss: 0.0021
Epoch 35/300
2/2 [==============================] - 0s 33ms/step - loss: 0.0019
Epoch 36/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0016
Epoch 37/300
2/2 [==============================] - 0s 34ms/step - loss: 0.0019
Epoch 38/300
2/2 [==============================] - 0s 39ms/step - loss: 0.0019
Epoch 39/300
2/2 [==============================] - 0s 32ms/step - loss: 0.0015
Epoch 40/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0021
Epoch 41/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0021
Epoch 42/300
2/2 [==============================] - 0s 34ms/step - loss: 0.0015
Epoch 43/300
2/2 [==============================] - 0s 32ms/step - loss: 0.0023
Epoch 44/300
2/2 [==============================] - 0s 33ms/step - loss: 0.0028
Epoch 45/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0019
Epoch 46/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0016
Epoch 47/300
2/2 [==============================] - 0s 44ms/step - loss: 0.0016
Epoch 48/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0014
Epoch 49/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0015
Epoch 50/300
2/2 [==============================] - 0s 34ms/step - loss: 0.0016
Epoch 51/300
2/2 [==============================] - 0s 37ms/step - loss: 0.0014
Epoch 52/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0014
Epoch 53/300
2/2 [==============================] - 0s 35ms/step - loss: 0.0016
Epoch 54/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0028
Epoch 55/300
2/2 [==============================] - 0s 33ms/step - loss: 0.0021
Epoch 56/300
2/2 [==============================] - 0s 29ms/step - loss: 0.0014
Epoch 57/300
2/2 [==============================] - 0s 34ms/step - loss: 0.0026
Epoch 58/300
2/2 [==============================] - 0s 37ms/step - loss: 0.0025
Epoch 59/300
2/2 [==============================] - 0s 64ms/step - loss: 0.0014
Epoch 60/300
2/2 [==============================] - 0s 50ms/step - loss: 0.0018
Epoch 61/300
2/2 [==============================] - 0s 32ms/step - loss: 0.0023
Epoch 62/300
2/2 [==============================] - 0s 33ms/step - loss: 0.0015
Epoch 63/300
2/2 [==============================] - 0s 29ms/step - loss: 0.0013
Epoch 64/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0017
Epoch 65/300
2/2 [==============================] - 0s 29ms/step - loss: 0.0015
Epoch 66/300
2/2 [==============================] - 0s 33ms/step - loss: 0.0013
Epoch 67/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0015
Epoch 68/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0014
Epoch 69/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0015
Epoch 70/300
2/2 [==============================] - 0s 29ms/step - loss: 0.0014
Epoch 71/300
2/2 [==============================] - 0s 28ms/step - loss: 0.0014
Epoch 72/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0013
Epoch 73/300
2/2 [==============================] - 0s 32ms/step - loss: 0.0013
Epoch 74/300
2/2 [==============================] - 0s 29ms/step - loss: 0.0014
Epoch 75/300
2/2 [==============================] - 0s 29ms/step - loss: 0.0014
Epoch 76/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0013
Epoch 77/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0013
Epoch 78/300
2/2 [==============================] - 0s 28ms/step - loss: 0.0013
Epoch 79/300
2/2 [==============================] - 0s 29ms/step - loss: 0.0013
Epoch 80/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0015
Epoch 81/300
2/2 [==============================] - 0s 33ms/step - loss: 0.0013
Epoch 82/300
2/2 [==============================] - 0s 29ms/step - loss: 0.0024
Epoch 83/300
2/2 [==============================] - 0s 37ms/step - loss: 0.0020
Epoch 84/300
2/2 [==============================] - 0s 37ms/step - loss: 0.0013
Epoch 85/300
2/2 [==============================] - 0s 29ms/step - loss: 0.0014
Epoch 86/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0014
Epoch 87/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0021
Epoch 88/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0017
Epoch 89/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0014
Epoch 90/300
2/2 [==============================] - 0s 33ms/step - loss: 0.0017
Epoch 91/300
2/2 [==============================] - 0s 66ms/step - loss: 0.0013
Epoch 92/300
2/2 [==============================] - 0s 69ms/step - loss: 0.0030
Epoch 93/300
2/2 [==============================] - 0s 69ms/step - loss: 0.0027
Epoch 94/300
2/2 [==============================] - 0s 67ms/step - loss: 0.0014
Epoch 95/300
2/2 [==============================] - 0s 75ms/step - loss: 0.0026
Epoch 96/300
2/2 [==============================] - 0s 129ms/step - loss: 0.0027
Epoch 97/300
2/2 [==============================] - 0s 55ms/step - loss: 0.0013
Epoch 98/300
2/2 [==============================] - 0s 51ms/step - loss: 0.0017
Epoch 99/300
2/2 [==============================] - 0s 37ms/step - loss: 0.0013
Epoch 100/300
2/2 [==============================] - 0s 90ms/step - loss: 0.0024
Epoch 101/300
2/2 [==============================] - 0s 40ms/step - loss: 0.0023
Epoch 102/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0013
Epoch 103/300
2/2 [==============================] - 0s 44ms/step - loss: 0.0015
Epoch 104/300
2/2 [==============================] - 0s 37ms/step - loss: 0.0014
Epoch 105/300
2/2 [==============================] - 0s 36ms/step - loss: 0.0013
Epoch 106/300
2/2 [==============================] - 0s 68ms/step - loss: 0.0014
Epoch 107/300
2/2 [==============================] - 0s 67ms/step - loss: 0.0019
Epoch 108/300
2/2 [==============================] - 0s 49ms/step - loss: 0.0017
Epoch 109/300
2/2 [==============================] - 0s 59ms/step - loss: 0.0012
Epoch 110/300
2/2 [==============================] - 0s 68ms/step - loss: 0.0017
Epoch 111/300
2/2 [==============================] - 0s 62ms/step - loss: 0.0014
Epoch 112/300
2/2 [==============================] - 0s 40ms/step - loss: 0.0011
Epoch 113/300
2/2 [==============================] - 0s 85ms/step - loss: 0.0012
Epoch 114/300
2/2 [==============================] - 0s 39ms/step - loss: 0.0014
Epoch 115/300
2/2 [==============================] - 0s 80ms/step - loss: 0.0012
Epoch 116/300
2/2 [==============================] - 0s 66ms/step - loss: 0.0012
Epoch 117/300
2/2 [==============================] - 0s 80ms/step - loss: 0.0011
Epoch 118/300
2/2 [==============================] - 0s 36ms/step - loss: 0.0012
Epoch 119/300
2/2 [==============================] - 0s 59ms/step - loss: 0.0012
Epoch 120/300
2/2 [==============================] - 0s 61ms/step - loss: 0.0012
Epoch 121/300
2/2 [==============================] - 0s 85ms/step - loss: 0.0016
Epoch 122/300
2/2 [==============================] - 0s 76ms/step - loss: 0.0014
Epoch 123/300
2/2 [==============================] - 0s 62ms/step - loss: 0.0012
Epoch 124/300
2/2 [==============================] - 0s 69ms/step - loss: 0.0017
Epoch 125/300
2/2 [==============================] - 0s 76ms/step - loss: 0.0013
Epoch 126/300
2/2 [==============================] - 0s 76ms/step - loss: 0.0026
Epoch 127/300
2/2 [==============================] - 0s 49ms/step - loss: 0.0056
Epoch 128/300
2/2 [==============================] - 0s 29ms/step - loss: 0.0025
Epoch 129/300
2/2 [==============================] - 0s 28ms/step - loss: 0.0018
Epoch 130/300
2/2 [==============================] - 0s 39ms/step - loss: 0.0027
Epoch 131/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0012
Epoch 132/300
2/2 [==============================] - 0s 41ms/step - loss: 0.0022
Epoch 133/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0013
Epoch 134/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0017
Epoch 135/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0019
Epoch 136/300
2/2 [==============================] - 0s 43ms/step - loss: 0.0011
Epoch 137/300
2/2 [==============================] - 0s 36ms/step - loss: 0.0021
Epoch 138/300
2/2 [==============================] - 0s 46ms/step - loss: 0.0023
Epoch 139/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0013
Epoch 140/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0017
Epoch 141/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0018
Epoch 142/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0013
Epoch 143/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0013
Epoch 144/300
2/2 [==============================] - 0s 33ms/step - loss: 0.0016
Epoch 145/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0014
Epoch 146/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0013
Epoch 147/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0015
Epoch 148/300
2/2 [==============================] - 0s 33ms/step - loss: 0.0013
Epoch 149/300
2/2 [==============================] - 0s 33ms/step - loss: 0.0014
Epoch 150/300
2/2 [==============================] - 0s 32ms/step - loss: 0.0012
Epoch 151/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0011
Epoch 152/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0011
Epoch 153/300
2/2 [==============================] - 0s 37ms/step - loss: 0.0013
Epoch 154/300
2/2 [==============================] - 0s 29ms/step - loss: 0.0012
Epoch 155/300
2/2 [==============================] - 0s 29ms/step - loss: 0.0013
Epoch 156/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0012
Epoch 157/300
2/2 [==============================] - 0s 29ms/step - loss: 0.0015
Epoch 158/300
2/2 [==============================] - 0s 32ms/step - loss: 0.0012
Epoch 159/300
2/2 [==============================] - 0s 33ms/step - loss: 0.0017
Epoch 160/300
2/2 [==============================] - 0s 29ms/step - loss: 0.0027
Epoch 161/300
2/2 [==============================] - 0s 46ms/step - loss: 0.0017
Epoch 162/300
2/2 [==============================] - 0s 32ms/step - loss: 0.0016
Epoch 163/300
2/2 [==============================] - 0s 29ms/step - loss: 0.0019
Epoch 164/300
2/2 [==============================] - 0s 29ms/step - loss: 0.0012
Epoch 165/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0027
Epoch 166/300
2/2 [==============================] - 0s 29ms/step - loss: 0.0013
Epoch 167/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0020
Epoch 168/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0024
Epoch 169/300
2/2 [==============================] - 0s 29ms/step - loss: 0.0013
Epoch 170/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0014
Epoch 171/300
2/2 [==============================] - 0s 34ms/step - loss: 0.0014
Epoch 172/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0011
Epoch 173/300
2/2 [==============================] - 0s 32ms/step - loss: 0.0013
Epoch 174/300
2/2 [==============================] - 0s 28ms/step - loss: 0.0015
Epoch 175/300
2/2 [==============================] - 0s 34ms/step - loss: 0.0011
Epoch 176/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0019
Epoch 177/300
2/2 [==============================] - 0s 34ms/step - loss: 0.0019
Epoch 178/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0014
Epoch 179/300
2/2 [==============================] - 0s 76ms/step - loss: 0.0012
Epoch 180/300
2/2 [==============================] - 0s 89ms/step - loss: 0.0012
Epoch 181/300
2/2 [==============================] - 0s 50ms/step - loss: 0.0012
Epoch 182/300
2/2 [==============================] - 0s 61ms/step - loss: 0.0012
Epoch 183/300
2/2 [==============================] - 0s 82ms/step - loss: 0.0012
Epoch 184/300
2/2 [==============================] - 0s 60ms/step - loss: 0.0015
Epoch 185/300
2/2 [==============================] - 0s 33ms/step - loss: 0.0012
Epoch 186/300
2/2 [==============================] - 0s 32ms/step - loss: 0.0014
Epoch 187/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0015
Epoch 188/300
2/2 [==============================] - 0s 33ms/step - loss: 0.0013
Epoch 189/300
2/2 [==============================] - 0s 33ms/step - loss: 0.0013
Epoch 190/300
2/2 [==============================] - 0s 33ms/step - loss: 0.0015
Epoch 191/300
2/2 [==============================] - 0s 29ms/step - loss: 0.0011
Epoch 192/300
2/2 [==============================] - 0s 28ms/step - loss: 0.0014
Epoch 193/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0012
Epoch 194/300
2/2 [==============================] - 0s 32ms/step - loss: 0.0016
Epoch 195/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0022
Epoch 196/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0011
Epoch 197/300
2/2 [==============================] - 0s 29ms/step - loss: 0.0020
Epoch 198/300
2/2 [==============================] - 0s 33ms/step - loss: 0.0013
Epoch 199/300
2/2 [==============================] - 0s 33ms/step - loss: 0.0019
Epoch 200/300
2/2 [==============================] - 0s 48ms/step - loss: 0.0028
Epoch 201/300
2/2 [==============================] - 0s 36ms/step - loss: 0.0013
Epoch 202/300
2/2 [==============================] - 0s 29ms/step - loss: 0.0015
Epoch 203/300
2/2 [==============================] - 0s 54ms/step - loss: 0.0021
Epoch 204/300
2/2 [==============================] - 0s 37ms/step - loss: 0.0013
Epoch 205/300
2/2 [==============================] - 0s 32ms/step - loss: 0.0013
Epoch 206/300
2/2 [==============================] - 0s 43ms/step - loss: 0.0014
Epoch 207/300
2/2 [==============================] - 0s 34ms/step - loss: 0.0013
Epoch 208/300
2/2 [==============================] - 0s 29ms/step - loss: 0.0013
Epoch 209/300
2/2 [==============================] - 0s 45ms/step - loss: 0.0012
Epoch 210/300
2/2 [==============================] - 0s 38ms/step - loss: 0.0012
Epoch 211/300
2/2 [==============================] - 0s 35ms/step - loss: 0.0013
Epoch 212/300
2/2 [==============================] - 0s 101ms/step - loss: 0.0013
Epoch 213/300
2/2 [==============================] - 0s 88ms/step - loss: 0.0012
Epoch 214/300
2/2 [==============================] - 0s 55ms/step - loss: 0.0014
Epoch 215/300
2/2 [==============================] - 0s 33ms/step - loss: 0.0021
Epoch 216/300
2/2 [==============================] - 0s 32ms/step - loss: 0.0012
Epoch 217/300
2/2 [==============================] - 0s 41ms/step - loss: 0.0016
Epoch 218/300
2/2 [==============================] - 0s 33ms/step - loss: 0.0012
Epoch 219/300
2/2 [==============================] - 0s 69ms/step - loss: 0.0012
Epoch 220/300
2/2 [==============================] - 0s 79ms/step - loss: 0.0012
Epoch 221/300
2/2 [==============================] - 0s 44ms/step - loss: 0.0011
Epoch 222/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0014
Epoch 223/300
2/2 [==============================] - 0s 28ms/step - loss: 0.0013
Epoch 224/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0011
Epoch 225/300
2/2 [==============================] - 0s 35ms/step - loss: 0.0013
Epoch 226/300
2/2 [==============================] - 0s 31ms/step - loss: 9.6389e-04
Epoch 227/300
2/2 [==============================] - 0s 32ms/step - loss: 0.0013
Epoch 228/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0019
Epoch 229/300
2/2 [==============================] - 0s 29ms/step - loss: 0.0016
Epoch 230/300
2/2 [==============================] - 0s 29ms/step - loss: 0.0012
Epoch 231/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0022
Epoch 232/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0016
Epoch 233/300
2/2 [==============================] - 0s 28ms/step - loss: 0.0011
Epoch 234/300
2/2 [==============================] - 0s 32ms/step - loss: 0.0013
Epoch 235/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0011
Epoch 236/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0011
Epoch 237/300
2/2 [==============================] - 0s 32ms/step - loss: 0.0011
Epoch 238/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0011
Epoch 239/300
2/2 [==============================] - 0s 29ms/step - loss: 0.0011
Epoch 240/300
2/2 [==============================] - 0s 29ms/step - loss: 0.0010
Epoch 241/300
2/2 [==============================] - 0s 34ms/step - loss: 0.0010
Epoch 242/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0010
Epoch 243/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0012
Epoch 244/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0010
Epoch 245/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0017
Epoch 246/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0025
Epoch 247/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0012
Epoch 248/300
2/2 [==============================] - 0s 29ms/step - loss: 0.0016
Epoch 249/300
2/2 [==============================] - 0s 32ms/step - loss: 0.0013
Epoch 250/300
2/2 [==============================] - 0s 34ms/step - loss: 0.0012
Epoch 251/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0014
Epoch 252/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0012
Epoch 253/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0012
Epoch 254/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0011
Epoch 255/300
2/2 [==============================] - 0s 29ms/step - loss: 0.0011
Epoch 256/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0011
Epoch 257/300
2/2 [==============================] - 0s 37ms/step - loss: 0.0010
Epoch 258/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0011
Epoch 259/300
2/2 [==============================] - 0s 37ms/step - loss: 0.0011
Epoch 260/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0011
Epoch 261/300
2/2 [==============================] - 0s 29ms/step - loss: 0.0011
Epoch 262/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0011
Epoch 263/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0010
Epoch 264/300
2/2 [==============================] - 0s 28ms/step - loss: 0.0010
Epoch 265/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0011
Epoch 266/300
2/2 [==============================] - 0s 29ms/step - loss: 0.0011
Epoch 267/300
2/2 [==============================] - 0s 29ms/step - loss: 9.5714e-04
Epoch 268/300
2/2 [==============================] - 0s 34ms/step - loss: 0.0012
Epoch 269/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0011
Epoch 270/300
2/2 [==============================] - 0s 28ms/step - loss: 0.0010
Epoch 271/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0011
Epoch 272/300
2/2 [==============================] - 0s 36ms/step - loss: 0.0010
Epoch 273/300
2/2 [==============================] - 0s 28ms/step - loss: 0.0011
Epoch 274/300
2/2 [==============================] - 0s 28ms/step - loss: 9.8558e-04
Epoch 275/300
2/2 [==============================] - 0s 30ms/step - loss: 9.8905e-04
Epoch 276/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0010
Epoch 277/300
2/2 [==============================] - 0s 29ms/step - loss: 0.0018
Epoch 278/300
2/2 [==============================] - 0s 29ms/step - loss: 0.0016
Epoch 279/300
2/2 [==============================] - 0s 34ms/step - loss: 0.0012
Epoch 280/300
2/2 [==============================] - 0s 37ms/step - loss: 0.0019
Epoch 281/300
2/2 [==============================] - 0s 41ms/step - loss: 0.0013
Epoch 282/300
2/2 [==============================] - 0s 60ms/step - loss: 0.0011
Epoch 283/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0014
Epoch 284/300
2/2 [==============================] - 0s 34ms/step - loss: 0.0010
Epoch 285/300
2/2 [==============================] - 0s 32ms/step - loss: 0.0010
Epoch 286/300
2/2 [==============================] - 0s 36ms/step - loss: 0.0011
Epoch 287/300
2/2 [==============================] - 0s 33ms/step - loss: 0.0010
Epoch 288/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0010
Epoch 289/300
2/2 [==============================] - 0s 39ms/step - loss: 0.0012
Epoch 290/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0012
Epoch 291/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0012
Epoch 292/300
2/2 [==============================] - 0s 45ms/step - loss: 0.0024
Epoch 293/300
2/2 [==============================] - 0s 34ms/step - loss: 0.0017
Epoch 294/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0010
Epoch 295/300
2/2 [==============================] - 0s 37ms/step - loss: 0.0021
Epoch 296/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0018
Epoch 297/300
2/2 [==============================] - 0s 37ms/step - loss: 0.0011
Epoch 298/300
2/2 [==============================] - 0s 36ms/step - loss: 0.0019
Epoch 299/300
2/2 [==============================] - 0s 46ms/step - loss: 0.0012
Epoch 300/300
2/2 [==============================] - 0s 34ms/step - loss: 0.0013

GRU¶

Simplified version of LSTM: reset gate and update gate. The reset gate decides how much of the previous hidden state to keep, and the update gate decides how much new input to incorporate into the hidden state. It requires fewer parameters.¶

Build GRU Model¶

In [45]:
GRU_model = Sequential()
GRU_model.add(GRU(units=300, return_sequences=True, input_shape=(features_set.shape[1], 1)))
GRU_model.add(Dropout(0.2))

GRU_model.add(GRU(units=100, return_sequences=True))
GRU_model.add(Dropout(0.2))

GRU_model.add(GRU(units=100, return_sequences=True))
GRU_model.add(Dropout(0.2))

GRU_model.add(GRU(units=100))
GRU_model.add(Dropout(0.2))

GRU_model.add(Dense(units = 1))

Compile GRU Model¶

In [46]:
GRU_model.compile(optimizer = 'adam', loss = 'mean_squared_error')
In [47]:
GRU_model.summary()
Model: "sequential_2"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 gru (GRU)                   (None, 60, 300)           272700    
                                                                 
 dropout_8 (Dropout)         (None, 60, 300)           0         
                                                                 
 gru_1 (GRU)                 (None, 60, 100)           120600    
                                                                 
 dropout_9 (Dropout)         (None, 60, 100)           0         
                                                                 
 gru_2 (GRU)                 (None, 60, 100)           60600     
                                                                 
 dropout_10 (Dropout)        (None, 60, 100)           0         
                                                                 
 gru_3 (GRU)                 (None, 100)               60600     
                                                                 
 dropout_11 (Dropout)        (None, 100)               0         
                                                                 
 dense_2 (Dense)             (None, 1)                 101       
                                                                 
=================================================================
Total params: 514601 (1.96 MB)
Trainable params: 514601 (1.96 MB)
Non-trainable params: 0 (0.00 Byte)
_________________________________________________________________

Fit GRU Model¶

In [49]:
GRU_History = GRU_model.fit(features_set, labels, epochs = 300, batch_size = 3200)
Epoch 1/300
2/2 [==============================] - 0s 29ms/step - loss: 0.0015
Epoch 2/300
2/2 [==============================] - 0s 28ms/step - loss: 9.1099e-04
Epoch 3/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0013
Epoch 4/300
2/2 [==============================] - 0s 28ms/step - loss: 8.9488e-04
Epoch 5/300
2/2 [==============================] - 0s 25ms/step - loss: 0.0010
Epoch 6/300
2/2 [==============================] - 0s 25ms/step - loss: 9.8030e-04
Epoch 7/300
2/2 [==============================] - 0s 24ms/step - loss: 0.0016
Epoch 8/300
2/2 [==============================] - 0s 27ms/step - loss: 9.2135e-04
Epoch 9/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0017
Epoch 10/300
2/2 [==============================] - 0s 26ms/step - loss: 0.0013
Epoch 11/300
2/2 [==============================] - 0s 27ms/step - loss: 0.0019
Epoch 12/300
2/2 [==============================] - 0s 29ms/step - loss: 0.0020
Epoch 13/300
2/2 [==============================] - 0s 28ms/step - loss: 0.0013
Epoch 14/300
2/2 [==============================] - 0s 25ms/step - loss: 0.0016
Epoch 15/300
2/2 [==============================] - 0s 28ms/step - loss: 0.0010
Epoch 16/300
2/2 [==============================] - 0s 29ms/step - loss: 0.0019
Epoch 17/300
2/2 [==============================] - 0s 25ms/step - loss: 9.9991e-04
Epoch 18/300
2/2 [==============================] - 0s 27ms/step - loss: 0.0013
Epoch 19/300
2/2 [==============================] - 0s 27ms/step - loss: 7.9811e-04
Epoch 20/300
2/2 [==============================] - 0s 24ms/step - loss: 0.0014
Epoch 21/300
2/2 [==============================] - 0s 27ms/step - loss: 8.3891e-04
Epoch 22/300
2/2 [==============================] - 0s 26ms/step - loss: 0.0020
Epoch 23/300
2/2 [==============================] - 0s 27ms/step - loss: 0.0012
Epoch 24/300
2/2 [==============================] - 0s 27ms/step - loss: 0.0013
Epoch 25/300
2/2 [==============================] - 0s 25ms/step - loss: 0.0010
Epoch 26/300
2/2 [==============================] - 0s 25ms/step - loss: 0.0011
Epoch 27/300
2/2 [==============================] - 0s 25ms/step - loss: 8.1171e-04
Epoch 28/300
2/2 [==============================] - 0s 27ms/step - loss: 0.0018
Epoch 29/300
2/2 [==============================] - 0s 25ms/step - loss: 8.8119e-04
Epoch 30/300
2/2 [==============================] - 0s 26ms/step - loss: 0.0010
Epoch 31/300
2/2 [==============================] - 0s 27ms/step - loss: 9.2756e-04
Epoch 32/300
2/2 [==============================] - 0s 28ms/step - loss: 8.5231e-04
Epoch 33/300
2/2 [==============================] - 0s 27ms/step - loss: 0.0016
Epoch 34/300
2/2 [==============================] - 0s 26ms/step - loss: 8.7137e-04
Epoch 35/300
2/2 [==============================] - 0s 25ms/step - loss: 0.0015
Epoch 36/300
2/2 [==============================] - 0s 27ms/step - loss: 0.0013
Epoch 37/300
2/2 [==============================] - 0s 25ms/step - loss: 0.0012
Epoch 38/300
2/2 [==============================] - 0s 26ms/step - loss: 9.7022e-04
Epoch 39/300
2/2 [==============================] - 0s 25ms/step - loss: 0.0011
Epoch 40/300
2/2 [==============================] - 0s 26ms/step - loss: 8.3904e-04
Epoch 41/300
2/2 [==============================] - 0s 28ms/step - loss: 0.0010
Epoch 42/300
2/2 [==============================] - 0s 25ms/step - loss: 0.0012
Epoch 43/300
2/2 [==============================] - 0s 26ms/step - loss: 8.6205e-04
Epoch 44/300
2/2 [==============================] - 0s 25ms/step - loss: 8.6171e-04
Epoch 45/300
2/2 [==============================] - 0s 26ms/step - loss: 0.0012
Epoch 46/300
2/2 [==============================] - 0s 26ms/step - loss: 0.0012
Epoch 47/300
2/2 [==============================] - 0s 26ms/step - loss: 8.3217e-04
Epoch 48/300
2/2 [==============================] - 0s 25ms/step - loss: 8.6658e-04
Epoch 49/300
2/2 [==============================] - 0s 25ms/step - loss: 8.8774e-04
Epoch 50/300
2/2 [==============================] - 0s 24ms/step - loss: 0.0014
Epoch 51/300
2/2 [==============================] - 0s 24ms/step - loss: 0.0014
Epoch 52/300
2/2 [==============================] - 0s 25ms/step - loss: 0.0010
Epoch 53/300
2/2 [==============================] - 0s 27ms/step - loss: 0.0015
Epoch 54/300
2/2 [==============================] - 0s 28ms/step - loss: 9.4936e-04
Epoch 55/300
2/2 [==============================] - 0s 25ms/step - loss: 0.0026
Epoch 56/300
2/2 [==============================] - 0s 24ms/step - loss: 0.0012
Epoch 57/300
2/2 [==============================] - 0s 27ms/step - loss: 0.0015
Epoch 58/300
2/2 [==============================] - 0s 25ms/step - loss: 8.3054e-04
Epoch 59/300
2/2 [==============================] - 0s 28ms/step - loss: 0.0015
Epoch 60/300
2/2 [==============================] - 0s 26ms/step - loss: 8.6574e-04
Epoch 61/300
2/2 [==============================] - 0s 25ms/step - loss: 0.0013
Epoch 62/300
2/2 [==============================] - 0s 27ms/step - loss: 9.6525e-04
Epoch 63/300
2/2 [==============================] - 0s 26ms/step - loss: 0.0014
Epoch 64/300
2/2 [==============================] - 0s 25ms/step - loss: 8.9004e-04
Epoch 65/300
2/2 [==============================] - 0s 28ms/step - loss: 0.0010
Epoch 66/300
2/2 [==============================] - 0s 25ms/step - loss: 8.4782e-04
Epoch 67/300
2/2 [==============================] - 0s 24ms/step - loss: 0.0013
Epoch 68/300
2/2 [==============================] - 0s 24ms/step - loss: 9.2502e-04
Epoch 69/300
2/2 [==============================] - 0s 25ms/step - loss: 8.1751e-04
Epoch 70/300
2/2 [==============================] - 0s 27ms/step - loss: 8.3758e-04
Epoch 71/300
2/2 [==============================] - 0s 27ms/step - loss: 9.8058e-04
Epoch 72/300
2/2 [==============================] - 0s 26ms/step - loss: 0.0010
Epoch 73/300
2/2 [==============================] - 0s 24ms/step - loss: 0.0011
Epoch 74/300
2/2 [==============================] - 0s 27ms/step - loss: 0.0023
Epoch 75/300
2/2 [==============================] - 0s 26ms/step - loss: 0.0013
Epoch 76/300
2/2 [==============================] - 0s 28ms/step - loss: 0.0031
Epoch 77/300
2/2 [==============================] - 0s 29ms/step - loss: 0.0011
Epoch 78/300
2/2 [==============================] - 0s 31ms/step - loss: 0.0021
Epoch 79/300
2/2 [==============================] - 0s 28ms/step - loss: 0.0012
Epoch 80/300
2/2 [==============================] - 0s 27ms/step - loss: 0.0013
Epoch 81/300
2/2 [==============================] - 0s 29ms/step - loss: 8.8027e-04
Epoch 82/300
2/2 [==============================] - 0s 27ms/step - loss: 0.0010
Epoch 83/300
2/2 [==============================] - 0s 24ms/step - loss: 8.8506e-04
Epoch 84/300
2/2 [==============================] - 0s 25ms/step - loss: 7.2732e-04
Epoch 85/300
2/2 [==============================] - 0s 25ms/step - loss: 9.6369e-04
Epoch 86/300
2/2 [==============================] - 0s 25ms/step - loss: 0.0010
Epoch 87/300
2/2 [==============================] - 0s 27ms/step - loss: 8.2092e-04
Epoch 88/300
2/2 [==============================] - 0s 24ms/step - loss: 9.1392e-04
Epoch 89/300
2/2 [==============================] - 0s 28ms/step - loss: 8.8695e-04
Epoch 90/300
2/2 [==============================] - 0s 37ms/step - loss: 8.8345e-04
Epoch 91/300
2/2 [==============================] - 0s 26ms/step - loss: 9.1133e-04
Epoch 92/300
2/2 [==============================] - 0s 27ms/step - loss: 8.7299e-04
Epoch 93/300
2/2 [==============================] - 0s 26ms/step - loss: 9.2683e-04
Epoch 94/300
2/2 [==============================] - 0s 26ms/step - loss: 7.7390e-04
Epoch 95/300
2/2 [==============================] - 0s 28ms/step - loss: 7.6670e-04
Epoch 96/300
2/2 [==============================] - 0s 25ms/step - loss: 8.1387e-04
Epoch 97/300
2/2 [==============================] - 0s 42ms/step - loss: 8.1719e-04
Epoch 98/300
2/2 [==============================] - 0s 45ms/step - loss: 9.2919e-04
Epoch 99/300
2/2 [==============================] - 0s 27ms/step - loss: 0.0011
Epoch 100/300
2/2 [==============================] - 0s 30ms/step - loss: 8.4066e-04
Epoch 101/300
2/2 [==============================] - 0s 32ms/step - loss: 7.7987e-04
Epoch 102/300
2/2 [==============================] - 0s 29ms/step - loss: 7.5439e-04
Epoch 103/300
2/2 [==============================] - 0s 29ms/step - loss: 7.8905e-04
Epoch 104/300
2/2 [==============================] - 0s 26ms/step - loss: 8.0700e-04
Epoch 105/300
2/2 [==============================] - 0s 27ms/step - loss: 8.4674e-04
Epoch 106/300
2/2 [==============================] - 0s 25ms/step - loss: 7.9269e-04
Epoch 107/300
2/2 [==============================] - 0s 23ms/step - loss: 7.6097e-04
Epoch 108/300
2/2 [==============================] - 0s 27ms/step - loss: 9.7291e-04
Epoch 109/300
2/2 [==============================] - 0s 26ms/step - loss: 8.9301e-04
Epoch 110/300
2/2 [==============================] - 0s 26ms/step - loss: 0.0012
Epoch 111/300
2/2 [==============================] - 0s 28ms/step - loss: 9.4533e-04
Epoch 112/300
2/2 [==============================] - 0s 29ms/step - loss: 0.0020
Epoch 113/300
2/2 [==============================] - 0s 25ms/step - loss: 9.9106e-04
Epoch 114/300
2/2 [==============================] - 0s 27ms/step - loss: 0.0011
Epoch 115/300
2/2 [==============================] - 0s 28ms/step - loss: 0.0012
Epoch 116/300
2/2 [==============================] - 0s 27ms/step - loss: 0.0011
Epoch 117/300
2/2 [==============================] - 0s 26ms/step - loss: 9.8360e-04
Epoch 118/300
2/2 [==============================] - 0s 79ms/step - loss: 8.8835e-04
Epoch 119/300
2/2 [==============================] - 0s 40ms/step - loss: 0.0011
Epoch 120/300
2/2 [==============================] - 0s 28ms/step - loss: 9.1511e-04
Epoch 121/300
2/2 [==============================] - 0s 24ms/step - loss: 0.0011
Epoch 122/300
2/2 [==============================] - 0s 26ms/step - loss: 9.0594e-04
Epoch 123/300
2/2 [==============================] - 0s 27ms/step - loss: 8.9259e-04
Epoch 124/300
2/2 [==============================] - 0s 26ms/step - loss: 8.9441e-04
Epoch 125/300
2/2 [==============================] - 0s 28ms/step - loss: 9.4429e-04
Epoch 126/300
2/2 [==============================] - 0s 26ms/step - loss: 8.8247e-04
Epoch 127/300
2/2 [==============================] - 0s 26ms/step - loss: 9.8742e-04
Epoch 128/300
2/2 [==============================] - 0s 28ms/step - loss: 0.0011
Epoch 129/300
2/2 [==============================] - 0s 27ms/step - loss: 0.0040
Epoch 130/300
2/2 [==============================] - 0s 24ms/step - loss: 0.0013
Epoch 131/300
2/2 [==============================] - 0s 25ms/step - loss: 0.0013
Epoch 132/300
2/2 [==============================] - 0s 27ms/step - loss: 0.0013
Epoch 133/300
2/2 [==============================] - 0s 26ms/step - loss: 0.0012
Epoch 134/300
2/2 [==============================] - 0s 26ms/step - loss: 0.0015
Epoch 135/300
2/2 [==============================] - 0s 26ms/step - loss: 0.0018
Epoch 136/300
2/2 [==============================] - 0s 26ms/step - loss: 7.9454e-04
Epoch 137/300
2/2 [==============================] - 0s 29ms/step - loss: 0.0014
Epoch 138/300
2/2 [==============================] - 0s 26ms/step - loss: 0.0012
Epoch 139/300
2/2 [==============================] - 0s 28ms/step - loss: 7.4842e-04
Epoch 140/300
2/2 [==============================] - 0s 28ms/step - loss: 0.0011
Epoch 141/300
2/2 [==============================] - 0s 26ms/step - loss: 9.3591e-04
Epoch 142/300
2/2 [==============================] - 0s 26ms/step - loss: 7.8746e-04
Epoch 143/300
2/2 [==============================] - 0s 27ms/step - loss: 8.3400e-04
Epoch 144/300
2/2 [==============================] - 0s 25ms/step - loss: 7.6791e-04
Epoch 145/300
2/2 [==============================] - 0s 25ms/step - loss: 9.7392e-04
Epoch 146/300
2/2 [==============================] - 0s 26ms/step - loss: 7.7502e-04
Epoch 147/300
2/2 [==============================] - 0s 27ms/step - loss: 0.0012
Epoch 148/300
2/2 [==============================] - 0s 24ms/step - loss: 9.0636e-04
Epoch 149/300
2/2 [==============================] - 0s 29ms/step - loss: 9.2718e-04
Epoch 150/300
2/2 [==============================] - 0s 26ms/step - loss: 0.0011
Epoch 151/300
2/2 [==============================] - 0s 26ms/step - loss: 0.0013
Epoch 152/300
2/2 [==============================] - 0s 28ms/step - loss: 8.5817e-04
Epoch 153/300
2/2 [==============================] - 0s 25ms/step - loss: 8.6383e-04
Epoch 154/300
2/2 [==============================] - 0s 28ms/step - loss: 9.2905e-04
Epoch 155/300
2/2 [==============================] - 0s 26ms/step - loss: 8.5568e-04
Epoch 156/300
2/2 [==============================] - 0s 26ms/step - loss: 7.1449e-04
Epoch 157/300
2/2 [==============================] - 0s 25ms/step - loss: 8.4387e-04
Epoch 158/300
2/2 [==============================] - 0s 25ms/step - loss: 9.0697e-04
Epoch 159/300
2/2 [==============================] - 0s 27ms/step - loss: 0.0010
Epoch 160/300
2/2 [==============================] - 0s 25ms/step - loss: 0.0013
Epoch 161/300
2/2 [==============================] - 0s 26ms/step - loss: 0.0018
Epoch 162/300
2/2 [==============================] - 0s 27ms/step - loss: 0.0012
Epoch 163/300
2/2 [==============================] - 0s 30ms/step - loss: 0.0016
Epoch 164/300
2/2 [==============================] - 0s 32ms/step - loss: 0.0011
Epoch 165/300
2/2 [==============================] - 0s 25ms/step - loss: 0.0015
Epoch 166/300
2/2 [==============================] - 0s 28ms/step - loss: 0.0012
Epoch 167/300
2/2 [==============================] - 0s 26ms/step - loss: 0.0019
Epoch 168/300
2/2 [==============================] - 0s 25ms/step - loss: 0.0011
Epoch 169/300
2/2 [==============================] - 0s 25ms/step - loss: 0.0018
Epoch 170/300
2/2 [==============================] - 0s 26ms/step - loss: 0.0012
Epoch 171/300
2/2 [==============================] - 0s 27ms/step - loss: 0.0013
Epoch 172/300
2/2 [==============================] - 0s 26ms/step - loss: 9.2894e-04
Epoch 173/300
2/2 [==============================] - 0s 27ms/step - loss: 0.0010
Epoch 174/300
2/2 [==============================] - 0s 26ms/step - loss: 9.5806e-04
Epoch 175/300
2/2 [==============================] - 0s 26ms/step - loss: 0.0012
Epoch 176/300
2/2 [==============================] - 0s 26ms/step - loss: 0.0017
Epoch 177/300
2/2 [==============================] - 0s 25ms/step - loss: 0.0011
Epoch 178/300
2/2 [==============================] - 0s 26ms/step - loss: 0.0011
Epoch 179/300
2/2 [==============================] - 0s 25ms/step - loss: 0.0011
Epoch 180/300
2/2 [==============================] - 0s 25ms/step - loss: 8.3284e-04
Epoch 181/300
2/2 [==============================] - 0s 25ms/step - loss: 8.5115e-04
Epoch 182/300
2/2 [==============================] - 0s 27ms/step - loss: 8.2456e-04
Epoch 183/300
2/2 [==============================] - 0s 25ms/step - loss: 9.9511e-04
Epoch 184/300
2/2 [==============================] - 0s 25ms/step - loss: 8.9341e-04
Epoch 185/300
2/2 [==============================] - 0s 26ms/step - loss: 8.9302e-04
Epoch 186/300
2/2 [==============================] - 0s 27ms/step - loss: 8.2447e-04
Epoch 187/300
2/2 [==============================] - 0s 29ms/step - loss: 7.4186e-04
Epoch 188/300
2/2 [==============================] - 0s 26ms/step - loss: 8.4731e-04
Epoch 189/300
2/2 [==============================] - 0s 25ms/step - loss: 8.7227e-04
Epoch 190/300
2/2 [==============================] - 0s 25ms/step - loss: 6.6852e-04
Epoch 191/300
2/2 [==============================] - 0s 25ms/step - loss: 7.4747e-04
Epoch 192/300
2/2 [==============================] - 0s 27ms/step - loss: 0.0013
Epoch 193/300
2/2 [==============================] - 0s 26ms/step - loss: 0.0011
Epoch 194/300
2/2 [==============================] - 0s 24ms/step - loss: 9.1299e-04
Epoch 195/300
2/2 [==============================] - 0s 26ms/step - loss: 7.6903e-04
Epoch 196/300
2/2 [==============================] - 0s 24ms/step - loss: 0.0014
Epoch 197/300
2/2 [==============================] - 0s 25ms/step - loss: 0.0013
Epoch 198/300
2/2 [==============================] - 0s 27ms/step - loss: 9.2971e-04
Epoch 199/300
2/2 [==============================] - 0s 24ms/step - loss: 9.1601e-04
Epoch 200/300
2/2 [==============================] - 0s 25ms/step - loss: 0.0012
Epoch 201/300
2/2 [==============================] - 0s 25ms/step - loss: 0.0010
Epoch 202/300
2/2 [==============================] - 0s 26ms/step - loss: 7.9099e-04
Epoch 203/300
2/2 [==============================] - 0s 24ms/step - loss: 0.0011
Epoch 204/300
2/2 [==============================] - 0s 27ms/step - loss: 8.9596e-04
Epoch 205/300
2/2 [==============================] - 0s 26ms/step - loss: 7.2944e-04
Epoch 206/300
2/2 [==============================] - 0s 28ms/step - loss: 7.7591e-04
Epoch 207/300
2/2 [==============================] - 0s 27ms/step - loss: 0.0011
Epoch 208/300
2/2 [==============================] - 0s 27ms/step - loss: 7.6260e-04
Epoch 209/300
2/2 [==============================] - 0s 25ms/step - loss: 9.1872e-04
Epoch 210/300
2/2 [==============================] - 0s 26ms/step - loss: 0.0011
Epoch 211/300
2/2 [==============================] - 0s 27ms/step - loss: 8.5457e-04
Epoch 212/300
2/2 [==============================] - 0s 26ms/step - loss: 8.6778e-04
Epoch 213/300
2/2 [==============================] - 0s 25ms/step - loss: 8.9113e-04
Epoch 214/300
2/2 [==============================] - 0s 25ms/step - loss: 7.8802e-04
Epoch 215/300
2/2 [==============================] - 0s 28ms/step - loss: 8.8206e-04
Epoch 216/300
2/2 [==============================] - 0s 29ms/step - loss: 9.7884e-04
Epoch 217/300
2/2 [==============================] - 0s 28ms/step - loss: 7.7089e-04
Epoch 218/300
2/2 [==============================] - 0s 27ms/step - loss: 0.0011
Epoch 219/300
2/2 [==============================] - 0s 31ms/step - loss: 9.4775e-04
Epoch 220/300
2/2 [==============================] - 0s 26ms/step - loss: 9.0550e-04
Epoch 221/300
2/2 [==============================] - 0s 25ms/step - loss: 0.0010
Epoch 222/300
2/2 [==============================] - 0s 25ms/step - loss: 8.9265e-04
Epoch 223/300
2/2 [==============================] - 0s 25ms/step - loss: 8.9822e-04
Epoch 224/300
2/2 [==============================] - 0s 27ms/step - loss: 0.0013
Epoch 225/300
2/2 [==============================] - 0s 25ms/step - loss: 0.0015
Epoch 226/300
2/2 [==============================] - 0s 26ms/step - loss: 9.6624e-04
Epoch 227/300
2/2 [==============================] - 0s 24ms/step - loss: 8.3240e-04
Epoch 228/300
2/2 [==============================] - 0s 29ms/step - loss: 9.8412e-04
Epoch 229/300
2/2 [==============================] - 0s 25ms/step - loss: 8.1253e-04
Epoch 230/300
2/2 [==============================] - 0s 25ms/step - loss: 7.0318e-04
Epoch 231/300
2/2 [==============================] - 0s 24ms/step - loss: 8.4021e-04
Epoch 232/300
2/2 [==============================] - 0s 28ms/step - loss: 7.3491e-04
Epoch 233/300
2/2 [==============================] - 0s 27ms/step - loss: 7.1771e-04
Epoch 234/300
2/2 [==============================] - 0s 25ms/step - loss: 8.4999e-04
Epoch 235/300
2/2 [==============================] - 0s 26ms/step - loss: 0.0015
Epoch 236/300
2/2 [==============================] - 0s 26ms/step - loss: 0.0011
Epoch 237/300
2/2 [==============================] - 0s 25ms/step - loss: 7.9317e-04
Epoch 238/300
2/2 [==============================] - 0s 28ms/step - loss: 9.1153e-04
Epoch 239/300
2/2 [==============================] - 0s 25ms/step - loss: 0.0010
Epoch 240/300
2/2 [==============================] - 0s 27ms/step - loss: 7.6901e-04
Epoch 241/300
2/2 [==============================] - 0s 26ms/step - loss: 9.5661e-04
Epoch 242/300
2/2 [==============================] - 0s 28ms/step - loss: 8.7541e-04
Epoch 243/300
2/2 [==============================] - 0s 27ms/step - loss: 9.6288e-04
Epoch 244/300
2/2 [==============================] - 0s 27ms/step - loss: 0.0011
Epoch 245/300
2/2 [==============================] - 0s 24ms/step - loss: 0.0011
Epoch 246/300
2/2 [==============================] - 0s 25ms/step - loss: 0.0015
Epoch 247/300
2/2 [==============================] - 0s 47ms/step - loss: 0.0011
Epoch 248/300
2/2 [==============================] - 0s 44ms/step - loss: 0.0010
Epoch 249/300
2/2 [==============================] - 0s 28ms/step - loss: 0.0012
Epoch 250/300
2/2 [==============================] - 0s 24ms/step - loss: 0.0014
Epoch 251/300
2/2 [==============================] - 0s 24ms/step - loss: 8.9917e-04
Epoch 252/300
2/2 [==============================] - 0s 28ms/step - loss: 0.0015
Epoch 253/300
2/2 [==============================] - 0s 26ms/step - loss: 6.7499e-04
Epoch 254/300
2/2 [==============================] - 0s 24ms/step - loss: 0.0025
Epoch 255/300
2/2 [==============================] - 0s 56ms/step - loss: 0.0012
Epoch 256/300
2/2 [==============================] - 0s 24ms/step - loss: 0.0015
Epoch 257/300
2/2 [==============================] - 0s 28ms/step - loss: 8.4359e-04
Epoch 258/300
2/2 [==============================] - 0s 25ms/step - loss: 0.0018
Epoch 259/300
2/2 [==============================] - 0s 24ms/step - loss: 0.0014
Epoch 260/300
2/2 [==============================] - 0s 27ms/step - loss: 0.0011
Epoch 261/300
2/2 [==============================] - 0s 28ms/step - loss: 0.0013
Epoch 262/300
2/2 [==============================] - 0s 24ms/step - loss: 0.0014
Epoch 263/300
2/2 [==============================] - 0s 25ms/step - loss: 0.0013
Epoch 264/300
2/2 [==============================] - 0s 24ms/step - loss: 8.6234e-04
Epoch 265/300
2/2 [==============================] - 0s 29ms/step - loss: 0.0019
Epoch 266/300
2/2 [==============================] - 0s 27ms/step - loss: 8.8534e-04
Epoch 267/300
2/2 [==============================] - 0s 25ms/step - loss: 0.0018
Epoch 268/300
2/2 [==============================] - 0s 26ms/step - loss: 0.0021
Epoch 269/300
2/2 [==============================] - 0s 28ms/step - loss: 0.0010
Epoch 270/300
2/2 [==============================] - 0s 26ms/step - loss: 0.0020
Epoch 271/300
2/2 [==============================] - 0s 27ms/step - loss: 0.0012
Epoch 272/300
2/2 [==============================] - 0s 26ms/step - loss: 0.0015
Epoch 273/300
2/2 [==============================] - 0s 25ms/step - loss: 9.8750e-04
Epoch 274/300
2/2 [==============================] - 0s 26ms/step - loss: 9.6379e-04
Epoch 275/300
2/2 [==============================] - 0s 26ms/step - loss: 7.9564e-04
Epoch 276/300
2/2 [==============================] - 0s 27ms/step - loss: 7.8559e-04
Epoch 277/300
2/2 [==============================] - 0s 25ms/step - loss: 0.0016
Epoch 278/300
2/2 [==============================] - 0s 25ms/step - loss: 0.0012
Epoch 279/300
2/2 [==============================] - 0s 27ms/step - loss: 8.1380e-04
Epoch 280/300
2/2 [==============================] - 0s 25ms/step - loss: 0.0012
Epoch 281/300
2/2 [==============================] - 0s 27ms/step - loss: 8.8170e-04
Epoch 282/300
2/2 [==============================] - 0s 26ms/step - loss: 9.4164e-04
Epoch 283/300
2/2 [==============================] - 0s 27ms/step - loss: 0.0012
Epoch 284/300
2/2 [==============================] - 0s 24ms/step - loss: 8.7087e-04
Epoch 285/300
2/2 [==============================] - 0s 25ms/step - loss: 7.6947e-04
Epoch 286/300
2/2 [==============================] - 0s 30ms/step - loss: 8.2362e-04
Epoch 287/300
2/2 [==============================] - 0s 26ms/step - loss: 8.8744e-04
Epoch 288/300
2/2 [==============================] - 0s 32ms/step - loss: 7.4360e-04
Epoch 289/300
2/2 [==============================] - 0s 24ms/step - loss: 7.9196e-04
Epoch 290/300
2/2 [==============================] - 0s 25ms/step - loss: 6.8367e-04
Epoch 291/300
2/2 [==============================] - 0s 24ms/step - loss: 8.5388e-04
Epoch 292/300
2/2 [==============================] - 0s 29ms/step - loss: 7.4762e-04
Epoch 293/300
2/2 [==============================] - 0s 25ms/step - loss: 0.0011
Epoch 294/300
2/2 [==============================] - 0s 27ms/step - loss: 9.3496e-04
Epoch 295/300
2/2 [==============================] - 0s 26ms/step - loss: 0.0010
Epoch 296/300
2/2 [==============================] - 0s 24ms/step - loss: 6.7403e-04
Epoch 297/300
2/2 [==============================] - 0s 26ms/step - loss: 0.0023
Epoch 298/300
2/2 [==============================] - 0s 26ms/step - loss: 0.0017
Epoch 299/300
2/2 [==============================] - 0s 25ms/step - loss: 0.0013
Epoch 300/300
2/2 [==============================] - 0s 25ms/step - loss: 9.3637e-04

Evaluate The Model¶

Evaluate Model Training¶

In [50]:
fig = go.Figure()

# Training loss trace
fig.add_trace(go.Scatter(x=list(range(1, len(RNN_History.history['loss']) + 1)), 
                         y=RNN_History.history['loss'],
                         mode='lines',
                         name='RNN Training Loss'))
fig.add_trace(go.Scatter(x=list(range(1, len(LSTM_History.history['loss']) + 1)), 
                         y=LSTM_History.history['loss'],
                         mode='lines',
                         name='LSTM Training Loss'))
fig.add_trace(go.Scatter(x=list(range(1, len(GRU_History.history['loss']) + 1)), 
                         y=GRU_History.history['loss'],
                         mode='lines',
                         name='GRU Training Loss'))

fig.update_layout(title='Training and Validation Loss',
                  xaxis_title='Epoch',
                  yaxis_title='Loss',
                  legend_title='Loss Type',
                  hovermode='x',
                  hoverlabel=dict(bgcolor='white', font_size=12, font_family='Rockwell'),
                  template='plotly_white')

fig.show()

Evaluate Model Prediction¶

Using cleaned test dataset¶
In [57]:
google_testing_complete = pd.read_csv("./Google_Stock_Test (2023).csv")
google_testing_processed = google_testing_complete.iloc[:, 1:2].values

google_total = pd.concat((google_training_complete['Close'], google_testing_complete['Close']), axis=0)

test_inputs = google_total[len(google_total) - len(google_testing_complete) - 60:].values
test_inputs
Out[57]:
array([101.419998,  98.68    ,  97.860001,  97.18    ,  97.559998,
        99.059998,  96.559998,  99.970001, 100.769997,  99.629997,
        99.970001, 101.129997, 102.519997, 104.480003,  94.93    ,
        92.220001,  96.290001,  94.510002,  90.470001,  86.970001,
        83.43    ,  86.580002,  88.489998,  88.900002,  87.32    ,
        93.940002,  96.410004,  95.699997,  98.440002,  98.849998,
        98.360001,  97.43    ,  95.599998,  97.050003,  98.459999,
        97.459999,  96.050003,  95.190002, 100.989998, 100.989998,
       100.440002,  99.480003,  96.980003,  94.940002,  93.709999,
        92.830002,  93.309998,  95.629997,  95.07    ,  90.860001,
        90.260002,  88.440002,  89.019997,  89.580002,  87.760002,
        89.230003,  87.389999,  86.019997,  88.449997,  88.230003,
        89.120003,  88.080002,  86.199997,  87.339996,  88.019997,
        88.419998,  91.519997,  91.129997,  92.120003,  91.290001,
        91.120003,  93.050003,  98.019997,  99.790001,  97.699997,
        95.220001,  97.519997,  99.370003,  96.940002,  98.839996,
       100.43    , 107.739998, 104.779999, 102.900002, 107.639999,
        99.370003,  95.010002,  94.57    ,  94.610001,  94.68    ,
        96.940002,  95.510002,  94.349998,  91.790001,  91.650002,
        90.889999,  89.129997,  89.870003,  90.059998,  90.360001,
        92.      ,  93.650002,  95.129997,  93.860001,  94.25    ,
        92.32    ,  90.629997,  91.110001,  93.970001,  96.110001,
       100.32    , 101.620003, 101.220001, 104.919998, 103.370003,
       105.599998, 105.440002, 102.459999, 101.029999, 101.389999,
       100.889999, 103.730003, 104.360001, 104.720001, 104.470001,
       108.419998, 106.440002, 105.349998, 104.639999, 107.43    ,
       108.870003, 105.970001, 104.5     , 104.18    , 105.290001,
       105.410004, 105.970001, 103.849998, 103.709999, 107.589996,
       107.339996, 107.199997, 105.32    , 105.410004, 104.690002,
       105.57    , 107.769997, 107.349998, 111.75    , 116.57    ,
       117.510002, 116.510002, 119.510002, 120.839996, 122.830002,
       122.760002, 125.050003, 122.559998, 120.900002, 123.480003,
       124.610001, 123.669998, 122.870003, 123.720001, 124.669998,
       126.010002, 127.309998, 122.5     , 122.139999, 122.230003,
       123.639999, 123.830002, 123.669998, 125.089996, 123.529999,
       123.099998, 120.550003, 123.150002, 122.339996, 118.339996,
       118.330002, 120.18    , 119.099998, 119.699997, 119.900002,
       121.75    , 120.110001, 119.480003, 116.449997, 117.139999,
       118.93    , 124.540001, 125.419998, 124.650002, 123.760002,
       122.029999, 119.199997, 120.019997, 121.529999, 122.209999,
       129.270004, 129.399994, 132.580002])
In [58]:
test_inputs = test_inputs.reshape(-1,1)
test_inputs = scaler.transform(test_inputs)

test_features = []
for i in range(60, len(test_inputs)):
    test_features.append(test_inputs[i-60:i, 0])
    
test_features = np.array(test_features)
test_features = np.reshape(test_features, (test_features.shape[0], test_features.shape[1], 1))
test_features.shape
Out[58]:
(143, 60, 1)
In [59]:
# Test the model on the test set
## RNN
RNN_test_loss = RNN_model.evaluate(test_features, google_testing_processed)
print(f'RNN Test Loss: {RNN_test_loss}')

## LSTM
LSTM_test_loss = LSTM_model.evaluate(test_features, google_testing_processed)
print(f'LSTM Test Loss: {LSTM_test_loss}')

## GRU
GRU_test_loss = GRU_model.evaluate(test_features, google_testing_processed)
print(f'LSTM Test Loss: {GRU_test_loss}')
5/5 [==============================] - 1s 66ms/step - loss: 11545.4990
RNN Test Loss: 11545.4990234375
5/5 [==============================] - 1s 16ms/step - loss: 11550.0293
LSTM Test Loss: 11550.029296875
5/5 [==============================] - 1s 18ms/step - loss: 11575.7695
LSTM Test Loss: 11575.76953125

Forecast Markets¶

In [60]:
# RNN
RNN_predictions = RNN_model.predict(test_features)
RNN_predictions = scaler.inverse_transform(RNN_predictions)

# LSTM
LSTM_predictions = LSTM_model.predict(test_features)
LSTM_predictions = scaler.inverse_transform(LSTM_predictions)

# GRU
GRU_predictions = GRU_model.predict(test_features)
GRU_predictions = scaler.inverse_transform(GRU_predictions)
5/5 [==============================] - 1s 51ms/step
5/5 [==============================] - 1s 14ms/step
5/5 [==============================] - 1s 10ms/step
In [61]:
google_testing_complete
Out[61]:
Date Open High Low Close Adj Close Volume
0 2023-01-03 89.589996 91.050003 88.519997 89.120003 89.120003 28131200
1 2023-01-04 90.349998 90.650002 87.269997 88.080002 88.080002 34854800
2 2023-01-05 87.470001 87.570000 85.900002 86.199997 86.199997 27194400
3 2023-01-06 86.790001 87.690002 84.860001 87.339996 87.339996 41381500
4 2023-01-09 88.360001 90.050003 87.860001 88.019997 88.019997 29003900
... ... ... ... ... ... ... ...
138 2023-07-24 121.660004 123.000000 120.980003 121.529999 121.529999 29686100
139 2023-07-25 121.360001 123.150002 121.019997 122.209999 122.209999 52509600
140 2023-07-26 130.070007 130.979996 128.320007 129.270004 129.270004 61682100
141 2023-07-27 131.669998 133.240005 128.789993 129.399994 129.399994 44952100
142 2023-07-28 130.779999 133.740005 130.570007 132.580002 132.580002 36572900

143 rows × 7 columns

In [62]:
google_prediction = google_testing_complete['Date']
google_prediction = pd.DataFrame(google_prediction)
google_prediction['RNN_Open'] = RNN_predictions
google_prediction['LSTM_Open'] = LSTM_predictions
google_prediction['GRU_Open'] = GRU_predictions

google_prediction
Out[62]:
Date RNN_Open LSTM_Open GRU_Open
0 2023-01-03 89.519333 98.401093 78.831085
1 2023-01-04 89.489113 97.782120 79.041344
2 2023-01-05 89.636574 97.219818 79.156418
3 2023-01-06 89.770462 96.714958 78.887222
4 2023-01-09 89.799355 96.264481 78.604805
... ... ... ... ...
138 2023-07-24 136.945770 127.663452 108.560730
139 2023-07-25 136.781494 127.848602 108.264091
140 2023-07-26 136.534897 128.019958 108.350693
141 2023-07-27 136.514725 128.205750 109.546196
142 2023-07-28 136.901825 128.438797 111.187195

143 rows × 4 columns

In [64]:
fig = go.Figure()

# Actual Apple stock price trace
fig.add_trace(go.Scatter(x=google_testing_complete.Date,
                         y=google_testing_complete.Open,
                         mode='lines',
                         name='Actual Google Stock Price',
                         line=dict(color='blue')))

# RNN Predicted Apple stock price trace
fig.add_trace(go.Scatter(x=google_prediction.Date,
                         y=google_prediction.RNN_Open,
                         mode='lines',
                         name='RNN Predicted Google Stock Price',
                         line=dict(color='red')))

# LSTM Predicted Apple stock price trace
fig.add_trace(go.Scatter(x=google_prediction.Date,
                         y=google_prediction.LSTM_Open,
                         mode='lines',
                         name='LSTM Predicted Google Stock Price',
                         line=dict(color='green')))

# LSTM Predicted Apple stock price trace
fig.add_trace(go.Scatter(x=google_prediction.Date,
                         y=google_prediction.GRU_Open,
                         mode='lines',
                         name='GRU Predicted Google Stock Price',
                         line=dict(color='yellow')))

fig.update_layout(title='Google Stock Price Forecast',
                  xaxis_title='Date',
                  yaxis_title='Google Stock Price',
                  legend_title='Price Type',
                  hovermode='x',
                  hoverlabel=dict(bgcolor='white', font_size=12, font_family='Rockwell'),
                  template='plotly_white')

fig.show()
In [ ]: